[ 525.301810] env[65758]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=65758) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 525.302179] env[65758]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=65758) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 525.302698] env[65758]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=65758) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 525.303051] env[65758]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 525.421690] env[65758]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=65758) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 525.431359] env[65758]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=65758) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 525.478016] env[65758]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 526.035786] env[65758]: INFO nova.virt.driver [None req-16322589-c104-4678-a577-0047b6d828bb None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 526.111110] env[65758]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 526.111317] env[65758]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 526.111438] env[65758]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=65758) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 529.344022] env[65758]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-e56466e1-7dc9-4951-ad35-6d557f6734a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.363380] env[65758]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=65758) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 529.363573] env[65758]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-b9291bd4-7a6a-40b7-a35a-35a0fc096564 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.397286] env[65758]: INFO oslo_vmware.api [-] Successfully established new session; session ID is e7bcd.
[ 529.397467] env[65758]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.286s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 529.398112] env[65758]: INFO nova.virt.vmwareapi.driver [None req-16322589-c104-4678-a577-0047b6d828bb None None] VMware vCenter version: 7.0.3
[ 529.402083] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0392b46-d364-423f-9299-025877bd999a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.421064] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63693cdc-38f0-4f4e-bbea-d578a1c0fbd4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.427817] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bc124c-798d-4224-a82c-83b476db1da2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.435093] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947ad3bc-a816-4af6-89d1-3a04bc7e11a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.449039] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f46508-a957-494b-9ab1-45124daac111 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.455653] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d5359b-99ce-4979-ba60-9c50a258cc22 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.487263] env[65758]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-82800291-728a-4243-a13c-268967ad9021 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 529.493491] env[65758]: DEBUG nova.virt.vmwareapi.driver [None req-16322589-c104-4678-a577-0047b6d828bb None None] Extension org.openstack.compute already exists. {{(pid=65758) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 529.496250] env[65758]: INFO nova.compute.provider_config [None req-16322589-c104-4678-a577-0047b6d828bb None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 530.000237] env[65758]: DEBUG nova.context [None req-16322589-c104-4678-a577-0047b6d828bb None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),86346776-7868-481c-b1cc-f33cbe8e66ad(cell1) {{(pid=65758) load_cells /opt/stack/nova/nova/context.py:472}}
[ 530.000583] env[65758]: INFO nova.utils [None req-16322589-c104-4678-a577-0047b6d828bb None None] The cell worker thread pool MainProcess.cell_worker is initialized
[ 530.002636] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 530.002855] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 530.003559] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 530.004010] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Acquiring lock "86346776-7868-481c-b1cc-f33cbe8e66ad" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 530.004221] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Lock "86346776-7868-481c-b1cc-f33cbe8e66ad" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 530.005207] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Lock "86346776-7868-481c-b1cc-f33cbe8e66ad" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 530.027677] env[65758]: INFO dbcounter [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Registered counter for database nova_cell0
[ 530.036643] env[65758]: INFO dbcounter [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Registered counter for database nova_cell1
[ 530.039943] env[65758]: DEBUG oslo_db.sqlalchemy.engines [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=65758) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 530.040343] env[65758]: DEBUG oslo_db.sqlalchemy.engines [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=65758) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 530.045463] env[65758]: ERROR nova.db.main.api [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl
[ 530.045463] env[65758]: func(*args, **kwargs)
[ 530.045463] env[65758]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_green.py", line 69, in __call__
[ 530.045463] env[65758]: self.work.run()
[ 530.045463] env[65758]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_utils.py", line 45, in run
[ 530.045463] env[65758]: result = self.fn(*self.args, **self.kwargs)
[ 530.045463] env[65758]: File "/opt/stack/nova/nova/utils.py", line 584, in context_wrapper
[ 530.045463] env[65758]: return func(*args, **kwargs)
[ 530.045463] env[65758]: File "/opt/stack/nova/nova/context.py", line 420, in gather_result
[ 530.045463] env[65758]: result = fn(*args, **kwargs)
[ 530.045463] env[65758]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 530.045463] env[65758]: return f(*args, **kwargs)
[ 530.045463] env[65758]: File "/opt/stack/nova/nova/objects/service.py", line 568, in _db_service_get_minimum_version
[ 530.045463] env[65758]: return db.service_get_minimum_version(context, binaries)
[ 530.045463] env[65758]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 530.045463] env[65758]: _check_db_access()
[ 530.045463] env[65758]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 530.045463] env[65758]: stacktrace = ''.join(traceback.format_stack())
[ 530.045463] env[65758]:
[ 530.046282] env[65758]: ERROR nova.db.main.api [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl
[ 530.046282] env[65758]: func(*args, **kwargs)
[ 530.046282] env[65758]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_green.py", line 69, in __call__
[ 530.046282] env[65758]: self.work.run()
[ 530.046282] env[65758]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_utils.py", line 45, in run
[ 530.046282] env[65758]: result = self.fn(*self.args, **self.kwargs)
[ 530.046282] env[65758]: File "/opt/stack/nova/nova/utils.py", line 584, in context_wrapper
[ 530.046282] env[65758]: return func(*args, **kwargs)
[ 530.046282] env[65758]: File "/opt/stack/nova/nova/context.py", line 420, in gather_result
[ 530.046282] env[65758]: result = fn(*args, **kwargs)
[ 530.046282] env[65758]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 530.046282] env[65758]: return f(*args, **kwargs)
[ 530.046282] env[65758]: File "/opt/stack/nova/nova/objects/service.py", line 568, in _db_service_get_minimum_version
[ 530.046282] env[65758]: return db.service_get_minimum_version(context, binaries)
[ 530.046282] env[65758]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 530.046282] env[65758]: _check_db_access()
[ 530.046282] env[65758]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 530.046282] env[65758]: stacktrace = ''.join(traceback.format_stack())
[ 530.046282] env[65758]:
[ 530.047064] env[65758]: WARNING nova.objects.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 530.047064] env[65758]: WARNING nova.objects.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Failed to get minimum service version for cell 86346776-7868-481c-b1cc-f33cbe8e66ad
[ 530.047337] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Acquiring lock "singleton_lock" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 530.047540] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Acquired lock "singleton_lock" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[ 530.047718] env[65758]: DEBUG oslo_concurrency.lockutils [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Releasing lock "singleton_lock" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}}
[ 530.048047] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Full set of CONF: {{(pid=65758) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_eventlet/service.py:275}}
[ 530.048182] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ******************************************************************************** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 530.048300] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] Configuration options gathered from: {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 530.048424] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 530.048613] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 530.048724] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ================================================================================ {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 530.048922] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] allow_resize_to_same_host = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.049086] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] arq_binding_timeout = 300 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.049240] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] backdoor_port = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.049361] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] backdoor_socket = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.049516] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] block_device_allocate_retries = 60 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.049661] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] block_device_allocate_retries_interval = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.049811] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cell_worker_thread_pool_size = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.049962] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cert = self.pem {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.050131] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.050313] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute_monitors = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.050474] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] config_dir = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.050654] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] config_drive_format = iso9660 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.050782] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.050933] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] config_source = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.051104] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] console_host = devstack {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.051263] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] control_exchange = nova {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.051406] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cpu_allocation_ratio = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.051552] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] daemon = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.051726] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] debug = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.051884] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] default_access_ip_network_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.052048] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] default_availability_zone = nova {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.052192] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] default_ephemeral_format = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.052341] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] default_green_pool_size = 1000 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.052564] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.052820] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] default_schedule_zone = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.053146] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] default_thread_pool_size = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.053425] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] disk_allocation_ratio = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.053701] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] enable_new_services = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.053948] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] flat_injected = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.054149] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] force_config_drive = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.054304] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] force_raw_images = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.054469] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] graceful_shutdown_timeout = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.054652] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] heal_instance_info_cache_interval = -1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.054881] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] host = cpu-1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.055154] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.055435] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] initial_disk_allocation_ratio = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.055720] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] initial_ram_allocation_ratio = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.056081] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.056303] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] instance_build_timeout = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.056474] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] instance_delete_interval = 300 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.056639] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] instance_format = [instance: %(uuid)s] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.056798] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] instance_name_template = instance-%08x {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.056951] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] instance_usage_audit = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.057127] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] instance_usage_audit_period = month {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.057284] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.057440] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] instances_path = /opt/stack/data/nova/instances {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.057598] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] internal_service_availability_zone = internal {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.057743] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] key = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.057891] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] live_migration_retry_count = 30 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.058051] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_color = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.058213] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_config_append = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.058364] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.058512] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_dir = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.058677] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_file = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.058798] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_options = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.058947] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_rotate_interval = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.059118] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_rotate_interval_type = days {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.059302] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] log_rotation_type = none {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.059427] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.059542] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.059699] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.059852] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.059972] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.060132] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] long_rpc_timeout = 1800 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.060295] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] max_concurrent_builds = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.060453] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] max_concurrent_live_migrations = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.060602] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] max_concurrent_snapshots = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.060747] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] max_local_block_devices = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.060890] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] max_logfile_count = 30 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.061044] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] max_logfile_size_mb = 200 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.061195] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] maximum_instance_delete_attempts = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.061341] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] migrate_max_retries = -1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.061494] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] mkisofs_cmd = genisoimage {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.061685] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] my_block_storage_ip = 10.180.1.21 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.061806] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] my_ip = 10.180.1.21 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.061992] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.062158] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] network_allocate_retries = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.062321] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.062477] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] osapi_compute_unique_server_name_scope = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.062632] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] password_length = 12 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.062782] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] periodic_enable = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.062955] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] periodic_fuzzy_delay = 60 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.063130] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] pointer_model = usbtablet {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.063286] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] preallocate_images = none {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.063435] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] publish_errors = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.063553] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] pybasedir = /opt/stack/nova {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.063693] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ram_allocation_ratio = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.063837] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] rate_limit_burst = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.063991] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] rate_limit_except_level = CRITICAL {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.064145] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] rate_limit_interval = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.064297] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] reboot_timeout = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.064442] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] reclaim_instance_interval = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.064581] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] record = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.064742] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] reimage_timeout_per_gb = 60 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.064891] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] report_interval = 120 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.065050] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] rescue_timeout = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.065199] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] reserved_host_cpus = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.065349] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] reserved_host_disk_mb = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.065489] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] reserved_host_memory_mb = 512 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.065638] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] reserved_huge_pages = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.065787] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] resize_confirm_window = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.065932] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] resize_fs_using_block_device = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.066089] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] resume_guests_state_on_host_boot = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.066248] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.066401] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] rpc_response_timeout = 60 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.066551] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] run_external_periodic_tasks = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.066708] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] running_deleted_instance_action = reap {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.066855] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] running_deleted_instance_poll_interval = 1800 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.067009] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] running_deleted_instance_timeout = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.067165] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler_instance_sync_interval = 120 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.067318] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_down_time = 720 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.067472] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] servicegroup_driver = db {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.067617] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] shell_completion = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.067762] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] shelved_offload_time = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.067906] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] shelved_poll_interval = 3600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.068073] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] shutdown_timeout = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.068227] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] source_is_ipv6 = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.068375] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ssl_only = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.068616] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.068769] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] sync_power_state_interval = 600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.068916] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] sync_power_state_pool_size = 1000 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.069083] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] syslog_log_facility = LOG_USER {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.069249] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] tempdir = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.069408] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] thread_pool_statistic_period = -1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.069555] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] timeout_nbd = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.069710] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] transport_url = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.069858] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] update_resources_interval = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.070019] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] use_cow_images = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.070175] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] use_journal = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.070357] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] use_json = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.070519] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] use_rootwrap_daemon = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.070669] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] use_stderr = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.070815] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] use_syslog = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.070957] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vcpu_pin_set = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.071124] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plugging_is_fatal = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.071280] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plugging_timeout = 300 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.071433] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] virt_mkfs = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.071580] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] volume_usage_poll_interval = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.071725] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] watch_log_file = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.071878] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] web = /usr/share/spice-html5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 530.072064] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_concurrency.disable_process_locking = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.072666] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.072850] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_brick.lock_path = /opt/stack/data/os_brick {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.073025] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.073187] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.073351] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.073508] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.073669] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.073823] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.073976] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.074149] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.compute_link_prefix = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.074310] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.074471] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.dhcp_domain = novalocal {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.074623] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.enable_instance_password = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.074838] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.glance_link_prefix = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.075023] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.075194] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.075351] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.instance_list_per_project_cells = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.075500] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.list_records_by_skipping_down_cells = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.075652] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.local_metadata_per_cell = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.075806] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.max_limit = 1000 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.075952] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.metadata_cache_expiration = 15 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.076128] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.neutron_default_project_id = default {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.076286] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.response_validation = warn {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.076438] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.use_neutron_default_nets = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.076588] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.076737] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.076885] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.077049] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.077208] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.vendordata_dynamic_targets = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.077357] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.vendordata_jsonfile_path = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.077522] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.077700] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.backend = dogpile.cache.memcached {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.077856] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.backend_argument = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.078018] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.backend_expiration_time = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.078176] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.config_prefix = cache.oslo {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.078328] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.debug_cache_backend = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.078477] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.enable_retry_client = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.078625] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.enable_socket_keepalive = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.078780] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.enabled = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.078929] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.enforce_fips_mode = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.079092] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.expiration_time = 600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.079285] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.hashclient_dead_timeout = 60.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.079446] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.hashclient_retry_attempts = 2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 530.079606] env[65758]: DEBUG oslo_service.backend._eventlet.service [None
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.hashclient_retry_timeout = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.079758] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_dead_retry = 300 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.079906] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_password = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.080067] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.080257] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.080439] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_pool_maxsize = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.080594] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.080748] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_sasl_enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.080918] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.081084] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_socket_timeout = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.081235] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.memcache_username = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.081388] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.proxies = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.081541] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.redis_db = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.081688] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] 
cache.redis_password = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.081844] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.redis_sentinel_service_name = mymaster {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.082020] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.082177] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.redis_server = localhost:6379 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.082332] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.redis_socket_timeout = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.082478] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.redis_username = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.082628] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.retry_attempts = 2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.082779] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.retry_delay = 0.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.082934] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.socket_keepalive_count = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.083093] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.socket_keepalive_idle = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.083256] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.socket_keepalive_interval = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.083406] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.tls_allowed_ciphers = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.083553] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.tls_cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.083701] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.tls_certfile = None {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.083849] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.tls_enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.083993] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cache.tls_keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.084167] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.084331] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.auth_type = password {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.084481] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.084648] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.catalog_info = volumev3::publicURL {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.084788] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.084939] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.085103] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.cross_az_attach = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.085254] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.debug = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.085402] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.endpoint_template = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.085556] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.http_retries = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.085707] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.085849] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.086018] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.os_region_name = RegionOne {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.086170] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.086316] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cinder.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.086476] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.086626] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.cpu_dedicated_set = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.086772] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.cpu_shared_set = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.086924] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.image_type_exclude_list = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.087082] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.087237] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.max_concurrent_disk_ops = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.087389] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.max_disk_devices_to_attach = -1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.087534] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.087685] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.088019] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.resource_provider_association_refresh = 300 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.088187] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.088347] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.shutdown_retry_interval = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.088513] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.088679] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] conductor.workers = 2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.088846] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] console.allowed_origins = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.089124] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] console.ssl_ciphers = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.089320] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] console.ssl_minimum_version = default {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.089489] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] consoleauth.enforce_session_timeout = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.089652] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] consoleauth.token_ttl = 600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.089810] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.089956] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.090125] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.090288] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.connect_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.090454] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.connect_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.090600] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.endpoint_override = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.090749] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.090893] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.091049] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.max_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.091197] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.min_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.091342] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.region_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.091493] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.retriable_status_codes = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.091657] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.091816] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.service_type = accelerator {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.091963] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.092120] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.status_code_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.092268] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.status_code_retry_delay = None {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.092412] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.092580] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.092724] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] cyborg.version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.092881] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.asyncio_connection = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.093034] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.asyncio_slave_connection = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.093197] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.backend = sqlalchemy {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.093354] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.connection = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.093504] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.connection_debug = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.093658] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.connection_parameters = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.093810] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.connection_recycle_time = 3600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.093957] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.connection_trace = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.094118] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.db_inc_retry_interval = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.094271] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.db_max_retries = 20 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
530.094421] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.db_max_retry_interval = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.094572] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.db_retry_interval = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.094721] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.max_overflow = 50 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.094866] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.max_pool_size = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.095028] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.max_retries = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.095187] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.095334] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.mysql_wsrep_sync_wait = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.095476] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.pool_timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.095625] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.retry_interval = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.095767] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.slave_connection = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.095913] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.sqlite_synchronous = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.096072] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] database.use_db_reconnect = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.096231] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.asyncio_connection = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.096379] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.asyncio_slave_connection = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.096552] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.backend = sqlalchemy {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.096761] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.connection = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.096926] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.connection_debug = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.097097] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.connection_parameters = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.097253] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.connection_recycle_time = 3600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.097407] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.connection_trace = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.097556] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.db_inc_retry_interval = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.097707] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.db_max_retries = 20 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.097859] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.db_max_retry_interval = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.098019] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.db_retry_interval = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.098173] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.max_overflow = 50 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.098322] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.max_pool_size = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.098471] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.max_retries = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.098633] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.098796] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.098919] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.pool_timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.099078] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.retry_interval = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.099252] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.slave_connection = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.099412] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] api_database.sqlite_synchronous = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.099577] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] devices.enabled_mdev_types = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.099739] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.099896] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ephemeral_storage_encryption.default_format = luks {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.100063] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ephemeral_storage_encryption.enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.100218] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.100403] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.api_servers = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.100559] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.100708] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.100859] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.101010] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.connect_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.101166] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.connect_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.101319] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.debug = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.101474] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.default_trusted_certificate_ids = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.101650] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.enable_certificate_validation = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.101804] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.enable_rbd_download = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.101951] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.endpoint_override = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.102122] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.102275] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.102425] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.max_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.102571] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.min_version = None {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.102721] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.num_retries = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.102878] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.rbd_ceph_conf = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.103038] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.rbd_connect_timeout = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.103198] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.rbd_pool = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.103353] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.rbd_user = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.103499] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.region_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.103645] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.retriable_status_codes = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.103789] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.103944] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.service_type = image {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.104106] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.104251] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.status_code_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.104405] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.status_code_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.104551] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.104717] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.104869] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.verify_glance_signatures = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.105052] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] glance.version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.105222] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] guestfs.debug = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.105381] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.105578] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.auth_type = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.105764] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.105919] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.106085] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.106237] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.connect_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.106387] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.connect_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.106532] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.endpoint_override = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.106684] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.106830] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.keyfile = None {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.106976] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.max_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.107135] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.min_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.107280] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.region_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.107425] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.retriable_status_codes = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.107566] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.107720] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.service_type = shared-file-system {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.107873] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.share_apply_policy_timeout = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.108033] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.108247] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.status_code_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.108411] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.status_code_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.108560] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.108729] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.108876] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] manila.version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.109047] 
env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] mks.enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.109428] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.109609] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] image_cache.manager_interval = 2400 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.109769] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] image_cache.precache_concurrency = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.109933] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] image_cache.remove_unused_base_images = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.110102] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.110274] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.110456] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] image_cache.subdirectory_name = _base {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.110625] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.api_max_retries = 60 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.110778] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.api_retry_interval = 2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.110927] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.111090] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.auth_type = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.111246] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.111392] env[65758]: DEBUG 
oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.111546] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.111697] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.conductor_group = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.111846] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.connect_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.111995] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.connect_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.112156] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.endpoint_override = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.112309] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.112458] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.112605] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.max_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.112750] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.min_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.112903] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.peer_list = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.113058] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.region_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.113210] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.retriable_status_codes = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.113362] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.serial_console_state_timeout = 10 
{{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.113506] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.113661] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.service_type = baremetal {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.113808] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.shard = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.113960] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.114119] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.status_code_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.114267] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.status_code_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.114419] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.114585] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.114734] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ironic.version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.114899] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.115074] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] key_manager.fixed_key = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.115581] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.115581] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.barbican_api_version = None {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.115581] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.barbican_endpoint = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.115761] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.barbican_endpoint_type = public {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.115816] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.barbican_region_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.115954] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.116109] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.116262] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.116412] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.116549] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.116695] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.number_of_retries = 60 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.116840] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.retry_delay = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.116988] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.send_service_user_token = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.117182] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.117337] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.117486] env[65758]: DEBUG 
oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.verify_ssl = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.117630] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican.verify_ssl_path = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.117782] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.117932] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.auth_type = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.118092] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.118242] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.118400] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.118548] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.118691] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.118840] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.118984] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] barbican_service_user.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.119151] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.approle_role_id = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.119330] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.approle_secret_id = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.119496] env[65758]: DEBUG 
oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.kv_mountpoint = secret {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.119645] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.kv_path = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.119795] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.kv_version = 2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.119941] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.namespace = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.120102] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.root_token_id = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.120250] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.ssl_ca_crt_file = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.120432] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.timeout = 60.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.120588] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.use_ssl = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.120743] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.120898] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.121057] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.121236] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.121392] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.connect_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.121539] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.connect_retry_delay = None {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.121683] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.endpoint_override = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.121827] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.121971] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.122127] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.max_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.122270] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.min_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.122415] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.region_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.122564] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.retriable_status_codes = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.122709] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.122937] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.service_type = identity {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.123107] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.123260] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.status_code_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.123415] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.status_code_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.123565] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.123734] env[65758]: DEBUG 
oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.123885] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] keystone.version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.124076] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.ceph_mount_options = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.124395] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.124568] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.connection_uri = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.124721] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.cpu_mode = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.124877] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.cpu_model_extra_flags = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.125046] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.cpu_models = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.125209] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.cpu_power_governor_high = performance {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.125370] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.cpu_power_governor_low = powersave {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.125518] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.cpu_power_management = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.125674] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.125825] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.device_detach_attempts = 8 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.125975] env[65758]: DEBUG 
oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.device_detach_timeout = 20 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.126142] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.disk_cachemodes = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.126291] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.disk_prefix = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.126444] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.enabled_perf_events = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.126608] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.file_backed_memory = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.126764] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.gid_maps = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.126910] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.hw_disk_discard = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.127079] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.hw_machine_type = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.127264] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.images_rbd_ceph_conf = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.127422] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.127573] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.127730] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.images_rbd_glance_store_name = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.127885] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.images_rbd_pool = rbd {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.128057] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.images_type = default {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.128208] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.images_volume_group = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.128361] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.inject_key = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.128508] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.inject_partition = -2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.128655] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.inject_password = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.128801] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.iscsi_iface = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.128950] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.iser_use_multipath = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.129115] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_bandwidth = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.129333] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.129508] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_downtime = 500 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.129663] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.129813] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.129959] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_inbound_addr = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.130123] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 
None None] libvirt.live_migration_parallel_connections = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.130280] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.130431] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_permit_post_copy = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.130578] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_scheme = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.130739] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_timeout_action = abort {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.130891] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_tunnelled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.131049] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_uri = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.131204] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.live_migration_with_native_tls = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.131386] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.max_queues = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.131543] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.131763] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.131912] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.nfs_mount_options = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.132207] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.132399] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.132559] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.num_iser_scan_tries = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.132710] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.num_memory_encrypted_guests = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.132862] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.133022] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.num_pcie_ports = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.133182] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.num_volume_scan_tries = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.133339] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.pmem_namespaces = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.133484] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.quobyte_client_cfg = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.133768] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.133934] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rbd_connect_timeout = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.134103] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.134259] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.134411] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rbd_secret_uuid = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.134559] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rbd_user = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.134710] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.134867] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.remote_filesystem_transport = ssh {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.135034] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rescue_image_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.135186] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rescue_kernel_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.135331] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rescue_ramdisk_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.135488] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.135635] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.rx_queue_size = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.135785] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.smbfs_mount_options = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.136076] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.136252] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.snapshot_compression = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.136405] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.snapshot_image_format = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.136623] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.136821] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.sparse_logical_volumes = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.137051] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.swtpm_enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.137228] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.swtpm_group = tss {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.137390] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.swtpm_user = tss {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.137572] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.sysinfo_serial = unique {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.137775] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.tb_cache_size = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.137938] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.tx_queue_size = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.138109] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.uid_maps = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.138266] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.use_virtio_for_bridges = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.138427] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.virt_type = kvm {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.138585] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.volume_clear = zero {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.138740] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.volume_clear_size = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.138892] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.volume_enforce_multipath = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.139057] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.volume_use_multipath = False {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.139236] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.vzstorage_cache_path = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.139403] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.139570] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.vzstorage_mount_group = qemu {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.139744] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.vzstorage_mount_opts = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.139906] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.140201] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.140371] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.vzstorage_mount_user = stack {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.140531] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.140693] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.140854] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.auth_type = password {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.141008] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.141164] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.141318] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.collect_timing = False {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.141468] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.connect_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.141611] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.connect_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.141767] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.default_floating_pool = public {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.141910] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.endpoint_override = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.142071] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.extension_sync_interval = 600 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.142224] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.http_retries = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.142375] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.142545] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.142720] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.max_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.142881] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.143041] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.min_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.143203] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.ovs_bridge = br-int {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.143359] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.physnets = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.143516] env[65758]: DEBUG 
oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.region_name = RegionOne {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.143664] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.retriable_status_codes = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.143819] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.service_metadata_proxy = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.143964] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.144131] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.service_type = network {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.144283] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.144430] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.status_code_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.144578] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.status_code_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.144722] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.144890] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.145047] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] neutron.version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.145211] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] notifications.bdms_in_notifications = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.145375] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] notifications.default_level = INFO {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.145526] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] notifications.include_share_mapping = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.145715] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] notifications.notification_format = unversioned {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.145873] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] notifications.notify_on_state_change = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.146047] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.146215] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] pci.alias = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.146376] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] pci.device_spec = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.146528] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] pci.report_in_placement = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.146693] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.146851] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.auth_type = password {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.147018] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.147179] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.147330] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.147480] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.147627] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 
None None] placement.connect_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.147773] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.connect_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.147919] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.default_domain_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.148079] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.default_domain_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.148229] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.domain_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.148376] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.domain_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.148520] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.endpoint_override = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.148698] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.148853] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.149008] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.max_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.149162] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.min_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.149337] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.password = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.149488] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.project_domain_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.149642] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.project_domain_name = Default {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.149794] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.project_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.149952] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.project_name = service {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.150120] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.region_name = RegionOne {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.150272] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.retriable_status_codes = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.150420] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.150575] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.service_type = placement {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.150725] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.150869] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.status_code_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.151024] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.status_code_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.151167] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.system_scope = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.151314] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.151460] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.trust_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.151632] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.user_domain_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.151790] 
env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.user_domain_name = Default {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.151937] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.user_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.152112] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.username = nova {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.152284] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.152433] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] placement.version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.152602] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.cores = 20 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.152755] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.count_usage_from_placement = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.152953] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.153123] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.injected_file_content_bytes = 10240 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.153278] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.injected_file_path_length = 255 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.153432] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.injected_files = 5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.153581] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.instances = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.153733] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.key_pairs = 100 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.153884] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.metadata_items = 128 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.154043] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.ram = 51200 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.154197] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.recheck_quota = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.154352] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.server_group_members = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.154502] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.server_groups = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.154716] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.154880] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] quota.unified_limits_resource_strategy = require {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.155051] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.155206] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.155356] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.image_metadata_prefilter = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.155503] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.155652] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.max_attempts = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.155798] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.max_placement_results = 1000 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.155953] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.156117] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.query_placement_for_image_type_support = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.156268] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.156431] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] scheduler.workers = 2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.156592] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.156845] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.157014] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.157182] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.157336] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.157488] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.157665] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.157851] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.158015] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.host_subset_size = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.158176] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.158327] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.158479] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.158629] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.158779] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.158933] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.isolated_hosts = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.159094] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.isolated_images = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.159273] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.159435] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.159604] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.159755] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.pci_in_placement = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.159905] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.160066] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.160221] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.160404] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.160570] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.160752] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.160908] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.track_instance_changes = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.161084] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.161249] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] metrics.required = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.161406] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] metrics.weight_multiplier = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.161559] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.161710] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] metrics.weight_setting = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.162028] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.162197] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] serial_console.enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.162368] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] serial_console.port_range = 10000:20000 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.162527] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.162685] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.162844] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] serial_console.serialproxy_port = 6083 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.163037] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.163206] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.auth_type = password {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.163355] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.164972] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.165159] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.165315] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.165464] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.165625] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.send_service_user_token = True {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.165773] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.165919] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] service_user.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.166106] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.agent_enabled = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.166260] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.166574] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.166799] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.166965] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.html5proxy_port = 6082 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.167129] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.image_compression = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.167280] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.jpeg_compression = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.167427] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.playback_compression = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.167579] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.require_secure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.167737] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.server_listen = 127.0.0.1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.167893] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.171903] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.171903] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.streaming_mode = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.172043] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] spice.zlib_compression = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.172170] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] upgrade_levels.baseapi = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.172332] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] upgrade_levels.compute = auto {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.172482] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] upgrade_levels.conductor = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.172643] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] upgrade_levels.scheduler = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.172831] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.172992] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.auth_type = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.173162] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.173318] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.173469] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.173616] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.insecure = False {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.173760] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.173908] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.174062] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vendordata_dynamic_auth.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.174229] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.api_retry_count = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.174378] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.ca_file = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.174535] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.cache_prefix = devstack-image-cache {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.174689] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.cluster_name = testcl1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.174841] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.connection_pool_size = 10 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.175417] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.console_delay_seconds = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.175575] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.datastore_regex = ^datastore.* {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.175816] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.175985] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.host_password = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.176159] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.host_port = 443 {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.176317] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.host_username = administrator@vsphere.local {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.176484] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.insecure = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.176626] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.integration_bridge = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.176779] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.maximum_objects = 100 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.176928] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.pbm_default_policy = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.177133] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.pbm_enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.177286] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.pbm_wsdl_location = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.177442] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.177587] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.serial_port_proxy_uri = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.177732] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.serial_port_service_uri = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.177889] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.task_poll_interval = 0.5 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.178059] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.use_linked_clone = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.178219] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.vnc_keymap = en-us {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
530.178375] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.vnc_port = 5900 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.178525] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vmware.vnc_port_total = 10000 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.178718] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.auth_schemes = ['none'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.178896] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.179237] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.179421] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.179578] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.novncproxy_port = 6080 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.179754] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.server_listen = 127.0.0.1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.179921] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.180079] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.vencrypt_ca_certs = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.180230] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.vencrypt_client_cert = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.180402] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vnc.vencrypt_client_key = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.180575] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.181971] env[65758]: DEBUG oslo_service.backend._eventlet.service 
[None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.disable_deep_image_inspection = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.182171] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.182335] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.182487] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.182643] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.disable_rootwrap = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.182790] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.enable_numa_live_migration = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.182938] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.183098] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.183244] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.183394] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.libvirt_disable_apic = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.183538] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.183684] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.183834] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.183978] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.184138] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.184286] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.184446] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.184608] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.184770] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.184929] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.185112] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.185271] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] wsgi.secure_proxy_ssl_header = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.185423] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] zvm.ca_file = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.185573] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] zvm.cloud_connector_url = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.185884] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.186066] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] zvm.reachable_timeout = 300 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.186230] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.186396] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.186560] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.connection_string = messaging:// {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.186718] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.enabled = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.186873] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.es_doc_type = notification {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.187035] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.es_scroll_size = 10000 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.187193] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.es_scroll_time = 2m {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.187347] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.filter_error_trace = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.189677] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.hmac_keys = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.189834] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.sentinel_service_name = mymaster {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.189997] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.socket_timeout = 0.1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.190172] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.trace_requests = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.190349] env[65758]: DEBUG oslo_service.backend._eventlet.service [None 
req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler.trace_sqlalchemy = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.190535] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler_jaeger.process_tags = {} {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.190715] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler_jaeger.service_name_prefix = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.190873] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] profiler_otlp.service_name_prefix = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.191051] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.191211] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.191369] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.191536] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.191703] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.191857] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.192016] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.192177] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.192331] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.192487] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.192634] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.192790] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.192951] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.193118] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.193275] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.193739] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.193906] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.194076] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.194243] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.194398] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.194587] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.194702] env[65758]: DEBUG 
oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.194851] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.195012] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.195167] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.195320] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.195475] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.195623] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.195774] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.195924] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.196087] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.ssl = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.196248] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.196408] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.196555] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=65758) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.196711] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.196863] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.ssl_version = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.197413] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.197606] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.197762] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_notifications.retry = -1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.197930] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.198106] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_messaging_notifications.transport_url = **** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.198269] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.auth_section = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.198425] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.auth_type = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.198570] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.cafile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.198717] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.certfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.198870] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.collect_timing = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.199024] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.connect_retries = None {{(pid=65758) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.199197] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.connect_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.199326] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.endpoint_id = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.199484] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.endpoint_interface = publicURL {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.199633] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.endpoint_override = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.199813] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.endpoint_region_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.199977] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.endpoint_service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.200162] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.endpoint_service_type = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.200323] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.insecure = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.200485] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.keyfile = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.200633] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.max_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.201369] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.min_version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.201523] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.region_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.201671] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.retriable_status_codes = None {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.201820] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.service_name = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.201967] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.service_type = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.202129] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.split_loggers = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.202277] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.status_code_retries = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.202434] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.status_code_retry_delay = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.202568] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.timeout = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.202712] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.valid_interfaces = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.202855] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_limit.version = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.203018] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_reports.file_event_handler = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.203178] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.203326] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] oslo_reports.log_dir = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.203487] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.203663] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.203818] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.203970] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_linux_bridge_privileged.log_daemon_traceback = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.204144] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.204296] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.204442] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.204603] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.204751] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_ovs_privileged.group = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.204894] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.205056] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_ovs_privileged.log_daemon_traceback = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.205213] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.205367] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.205516] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] vif_plug_ovs_privileged.user = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.205691] env[65758]: DEBUG 
oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_linux_bridge.flat_interface = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.205860] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.206029] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.206195] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.206365] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.206526] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.206682] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.206829] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.206994] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.207186] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_ovs.isolate_vif = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.207345] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.207513] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.207668] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.207821] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_ovs.ovsdb_interface = native {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.207971] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] os_vif_ovs.per_port_bridge = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.208141] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] privsep_osbrick.capabilities = [21, 2] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.208287] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] privsep_osbrick.group = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.208432] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] privsep_osbrick.helper_command = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.208581] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] privsep_osbrick.log_daemon_traceback = False {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.208731] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.208879] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.209034] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] privsep_osbrick.user = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.209232] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.209374] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] nova_sys_admin.group = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.209522] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] nova_sys_admin.helper_command = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.209672] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] nova_sys_admin.log_daemon_traceback = False {{(pid=65758) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.209823] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.209968] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.210129] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] nova_sys_admin.user = None {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 530.210254] env[65758]: DEBUG oslo_service.backend._eventlet.service [None req-beb19503-577a-4d0e-8a26-ed94f72489f0 None None] ******************************************************************************** {{(pid=65758) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 530.210678] env[65758]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 530.714378] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Getting list of instances from cluster (obj){ [ 530.714378] env[65758]: value = "domain-c8" [ 530.714378] env[65758]: _type = "ClusterComputeResource" [ 530.714378] env[65758]: } {{(pid=65758) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 530.715629] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccec2c28-1dd3-4d79-81b4-05ce963e5e74 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.725780] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Got total of 0 instances {{(pid=65758) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 530.726396] env[65758]: WARNING nova.virt.vmwareapi.driver [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 530.726858] env[65758]: INFO nova.virt.node [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Generated node identity 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 [ 530.727112] env[65758]: INFO nova.virt.node [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Wrote node identity 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 to /opt/stack/data/n-cpu-1/compute_id [ 531.230544] env[65758]: WARNING nova.compute.manager [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Compute nodes ['0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
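Everything up to the row of asterisks is nova-compute dumping every registered oslo.config option at DEBUG level before the service starts, with secrets such as profiler.hmac_keys and the notification transport_url masked as ****. A minimal, self-contained sketch of how a service produces such a dump with oslo.config; the single heartbeat_rate option and the 'demo' project name are registered here purely for illustration and are not the Nova code itself:

    import logging
    from oslo_config import cfg

    CONF = cfg.CONF
    LOG = logging.getLogger(__name__)

    # One option registered for illustration; nova-compute registers
    # thousands across groups such as oslo_messaging_rabbit, oslo_limit,
    # profiler, os_vif_ovs, privsep_osbrick, ...
    CONF.register_opts(
        [cfg.IntOpt('heartbeat_rate', default=3)],
        group='oslo_messaging_rabbit')

    if __name__ == '__main__':
        logging.basicConfig(level=logging.DEBUG)
        CONF([], project='demo')  # parse with no CLI args and no config files
        # Emits one "group.option = value" DEBUG line per registered option,
        # the same call site seen above (oslo_config/cfg.py, log_opt_values).
        CONF.log_opt_values(LOG, logging.DEBUG)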
[ 532.236640] env[65758]: INFO nova.compute.manager [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 533.242206] env[65758]: WARNING nova.compute.manager [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 533.242578] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 533.242707] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 533.242857] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 533.243022] env[65758]: DEBUG nova.compute.resource_tracker [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 533.243976] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92f930c-efde-442a-a392-73b1f50552de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.252855] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba250f90-8223-48aa-a61e-47e19022db14 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.269152] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00e78ec-8c04-4b19-a849-50193f9cbf8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.276726] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb3abc8-1b2e-4fdf-a270-941a1ee60a96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.305614] env[65758]: DEBUG nova.compute.resource_tracker [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180617MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 533.305791] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 533.305971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 533.808493] env[65758]: WARNING nova.compute.resource_tracker [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] No compute node record for cpu-1:0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 could not be found. [ 534.312798] env[65758]: INFO nova.compute.resource_tracker [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 [ 535.829820] env[65758]: DEBUG nova.compute.resource_tracker [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 535.830209] env[65758]: DEBUG nova.compute.resource_tracker [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 535.999861] env[65758]: INFO nova.scheduler.client.report [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] [req-7f71b401-fc88-40c2-9f1a-12885d723eab] Created resource provider record via placement API for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 536.017448] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797318b8-8e5d-435f-acc6-33ea50332184 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.025917] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46352387-95c3-480a-a559-9b15fc94654c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.065367] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b66556d-1394-4753-90fe-985850bf638d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.073889] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d10101a-f547-4524-b847-a6cb6cf429b8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.088371] env[65758]: DEBUG nova.compute.provider_tree [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 536.625732] env[65758]: DEBUG nova.scheduler.client.report [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 536.625982] env[65758]: DEBUG nova.compute.provider_tree [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 0 to 1 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 536.626164] env[65758]: DEBUG nova.compute.provider_tree [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 536.680342] env[65758]: DEBUG nova.compute.provider_tree [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Updating resource 
provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 1 to 2 during operation: update_traits {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 537.185643] env[65758]: DEBUG nova.compute.resource_tracker [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 537.186084] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.880s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 537.186084] env[65758]: DEBUG nova.service [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Creating RPC server for service compute {{(pid=65758) start /opt/stack/nova/nova/service.py:177}} [ 537.199237] env[65758]: DEBUG nova.service [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] Join ServiceGroup membership for this service compute {{(pid=65758) start /opt/stack/nova/nova/service.py:194}} [ 537.199483] env[65758]: DEBUG nova.servicegroup.drivers.db [None req-e30bd534-89a6-43c1-8232-b97d5118d407 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=65758) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 567.201507] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_power_states {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.705360] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Getting list of instances from cluster (obj){ [ 567.705360] env[65758]: value = "domain-c8" [ 567.705360] env[65758]: _type = "ClusterComputeResource" [ 567.705360] env[65758]: } {{(pid=65758) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 567.706687] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cbd420-d68d-42a4-b550-229b4a6346bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.715745] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Got total of 0 instances {{(pid=65758) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 567.715982] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 567.716319] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Getting list of instances from cluster (obj){ [ 567.716319] env[65758]: value = "domain-c8" [ 567.716319] env[65758]: _type = "ClusterComputeResource" [ 567.716319] env[65758]: } {{(pid=65758) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 567.717330] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81206ad-29b8-4e1c-8a5a-87b2a80d7500 {{(pid=65758) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.725535] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Got total of 0 instances {{(pid=65758) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 580.598296] env[65758]: INFO nova.utils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The default thread pool MainProcess.default is initialized [ 580.599375] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "67fdb417-62ea-412c-8b82-868d59149f89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.599523] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "67fdb417-62ea-412c-8b82-868d59149f89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.105415] env[65758]: DEBUG nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 581.349773] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "549673ec-3d75-4aad-a001-014f3f53a6b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.350374] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "549673ec-3d75-4aad-a001-014f3f53a6b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.655441] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.655441] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.657081] env[65758]: INFO nova.compute.claims [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.845572] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquiring lock "9e16d31b-e84c-448b-9d83-98cac49570a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.846438] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "9e16d31b-e84c-448b-9d83-98cac49570a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.854168] env[65758]: DEBUG nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 582.348304] env[65758]: DEBUG nova.compute.manager [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 582.414945] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.843183] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a602a82c-80a2-43fa-a651-6625d31be236 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.852412] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc2c302-cf2f-4ec2-8224-f1f54f45adf3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.889017] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.889017] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0453559b-ed52-4e1b-acde-dedae6dfe48e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.898714] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d91c549-0ffe-4bf0-9e7e-87fc2f020beb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.913344] env[65758]: DEBUG nova.compute.provider_tree [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.213858] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquiring lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.214172] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.215326] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquiring lock "e4540963-7be9-426e-90f8-b31524d2237b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.215532] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "e4540963-7be9-426e-90f8-b31524d2237b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.419521] env[65758]: DEBUG nova.scheduler.client.report [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 583.719448] env[65758]: DEBUG nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 583.734204] env[65758]: DEBUG nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 583.925595] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 583.926281] env[65758]: DEBUG nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 583.931270] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.516s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.936162] env[65758]: INFO nova.compute.claims [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 584.256376] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.260796] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.389477] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquiring lock "f1a1650b-4c45-47fc-9c45-f4625c959597" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.389715] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "f1a1650b-4c45-47fc-9c45-f4625c959597" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.440812] env[65758]: DEBUG nova.compute.utils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 584.442338] env[65758]: DEBUG nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 584.442640] env[65758]: DEBUG nova.network.neutron [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 584.445459] env[65758]: WARNING neutronclient.v2_0.client [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 584.448355] env[65758]: WARNING neutronclient.v2_0.client [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 584.448735] env[65758]: WARNING openstack [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 584.450715] env[65758]: WARNING openstack [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 584.896340] env[65758]: DEBUG nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 584.957963] env[65758]: DEBUG nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 585.114167] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7fa3e6-7549-4f68-8a71-b7c4d197c927 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.125580] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7384b48d-9a13-46a0-ac55-ce00a27060c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.163858] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f04ecd-5f6f-4f9c-aa34-4bb12ace6a25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.173135] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee64c593-d2ce-4824-8fcb-f89f0f97074d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.189358] env[65758]: DEBUG nova.compute.provider_tree [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.438072] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.470010] env[65758]: DEBUG nova.policy [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5a46f92e344735a697b4f9f07fb536', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aab1df827abb49b88b951d30ba485d39', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 585.488454] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.488792] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.489040] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.489229] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.489433] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.489657] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.489868] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.490121] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 585.490742] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.694347] env[65758]: DEBUG nova.scheduler.client.report [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 585.980046] env[65758]: DEBUG nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 585.994265] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.014804] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 586.015073] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 586.015233] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 586.015396] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 586.015531] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 586.015677] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 586.015878] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 586.016773] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 586.017289] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 586.017477] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 586.017650] env[65758]: DEBUG nova.virt.hardware [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 586.018721] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b7cbdd-850a-4f7f-bd8c-a29689340a1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.028939] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90c1304-5ef4-4f1b-8e52-336802cc02ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.049419] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21431f2c-0c00-4ada-a7a3-1874fdb00522 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.200484] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 586.201657] env[65758]: DEBUG nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 586.203847] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.317s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.208021] env[65758]: INFO nova.compute.claims [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.713308] env[65758]: DEBUG nova.compute.utils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 586.717316] env[65758]: DEBUG nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 586.717834] env[65758]: DEBUG nova.network.neutron [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 586.720436] env[65758]: WARNING neutronclient.v2_0.client [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 586.720436] env[65758]: WARNING neutronclient.v2_0.client [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
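The oslo_concurrency.lockutils records above ('Acquiring lock ... by ...', 'Lock ... acquired ... :: waited N.NNNs', 'Lock ... "released" ... :: held N.NNNs') come from the wrapper that lockutils.synchronized() places around the decorated function: the concurrent tempest builds all serialize on the "compute_resources" lock while ResourceTracker.instance_claim runs, which is why the compute_resources waits climb (waited 1.516s, then 3.317s) while each claim holds the lock for roughly 2.27s (held 2.270s, 2.269s). A minimal sketch of that pattern, assuming only oslo.concurrency is installed; claim_resources is an illustrative stand-in, not Nova code:

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Hypothetical stand-in for ResourceTracker.instance_claim(). The
    # synchronized() wrapper logs 'Lock "compute_resources" acquired by
    # "...claim_resources" :: waited N.NNNs' once it obtains the lock and
    # 'Lock "compute_resources" "released" ... :: held N.NNNs' on return,
    # which is the shape of the lockutils records in this log.
    pass  # update the compute node's usage for instance_uuid here

With each claim held for about 2.2-2.3 seconds in this run, a request queued behind several earlier builds accumulates the multi-second waits seen above.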
[ 586.720436] env[65758]: WARNING openstack [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 586.720919] env[65758]: WARNING openstack [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 586.847627] env[65758]: DEBUG nova.network.neutron [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Successfully created port: bf0bd4f9-a022-486a-96ba-e2c684bfa941 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 587.218484] env[65758]: DEBUG nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 587.303778] env[65758]: DEBUG nova.policy [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e60e042c807349bf8ba4420749e694fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd999e5f3384e4a24ad9ec68b2fa3fda7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 587.350438] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50805858-0434-44c3-a52d-1230c782d902 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.359960] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21116e9c-9dd5-4053-9366-e77be81d5153 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.392347] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b5073e-4813-4fe6-8909-079580594a82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.403788] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06827289-98f2-4cbc-b9ec-b68322a90434 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.421013] env[65758]: DEBUG nova.compute.provider_tree [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.688829] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.691271] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.927616] env[65758]: DEBUG nova.scheduler.client.report [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 588.194073] env[65758]: DEBUG nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 588.232381] env[65758]: DEBUG nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 588.261804] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 588.262105] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 588.262266] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 588.262515] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 588.262643] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 588.262782] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 588.262996] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.263230] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 588.263434] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 588.263632] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 588.263924] env[65758]: DEBUG nova.virt.hardware [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 588.265517] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6b6b9a-49e2-487a-bf63-08584e545d3c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.275216] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9297eb6-ed0e-437c-99c7-b7aca78c3f2b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.370768] env[65758]: DEBUG nova.network.neutron [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Successfully created port: 3d50d517-3f1a-4b04-a81d-54672953d4c6 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 588.433194] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.433721] env[65758]: DEBUG nova.compute.manager [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 588.436627] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.180s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 588.438036] env[65758]: INFO nova.compute.claims [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.626028] env[65758]: DEBUG nova.network.neutron [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Successfully updated port: bf0bd4f9-a022-486a-96ba-e2c684bfa941 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 588.724944] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.942903] env[65758]: DEBUG nova.compute.utils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 588.946097] env[65758]: DEBUG nova.compute.manager [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Not allocating networking since 'none' was specified. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 589.131633] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "refresh_cache-67fdb417-62ea-412c-8b82-868d59149f89" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.131822] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquired lock "refresh_cache-67fdb417-62ea-412c-8b82-868d59149f89" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.131992] env[65758]: DEBUG nova.network.neutron [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 589.447262] env[65758]: DEBUG nova.compute.manager [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 589.575282] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1793afe-760d-4d94-8668-68cef4bde957 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.585109] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1799bf-5584-4702-ad48-36523b0934ec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.618636] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea507c61-9980-494a-a0ff-b4c047491fca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.627886] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03e2cad-def7-4eb5-b73a-74baa282d43c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.641673] env[65758]: WARNING openstack [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 589.642202] env[65758]: WARNING openstack [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces 
in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 589.649762] env[65758]: DEBUG nova.compute.provider_tree [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.153758] env[65758]: DEBUG nova.scheduler.client.report [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 590.351204] env[65758]: DEBUG nova.network.neutron [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Successfully updated port: 3d50d517-3f1a-4b04-a81d-54672953d4c6 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 590.407575] env[65758]: DEBUG nova.network.neutron [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 590.458435] env[65758]: DEBUG nova.compute.manager [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 590.498216] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 590.498216] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 590.498216] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 590.498455] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 590.498455] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 590.498455] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 590.498455] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.498702] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 590.499123] 
env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 590.499123] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 590.499250] env[65758]: DEBUG nova.virt.hardware [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 590.500676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd031293-b8fa-426d-86cc-7a801e105bb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.509886] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5998c541-7be3-4943-aa83-d443e9be8462 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.525286] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.537123] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.537123] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5a87d61-cb5d-4a91-9d50-43b7a3c2d0b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.549937] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Created folder: OpenStack in parent group-v4. [ 590.550187] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Creating folder: Project (b0a2aad73cc5476c9543e385e04d1d36). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.550423] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2f04fa4-7829-4b57-af6e-2710cccc3193 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.563306] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Created folder: Project (b0a2aad73cc5476c9543e385e04d1d36) in parent group-v909763. [ 590.563424] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Creating folder: Instances. Parent ref: group-v909764. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.563724] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c51695d-b49c-4ac8-95dd-384d4816e138 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.575850] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Created folder: Instances in parent group-v909764. [ 590.576133] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 590.576347] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 590.576600] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-799e658b-49c5-496e-9ee2-ed66d1ec5325 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.595238] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.595238] env[65758]: value = "task-4659805" [ 590.595238] env[65758]: _type = "Task" [ 590.595238] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.607947] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659805, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.663528] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.227s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.663889] env[65758]: DEBUG nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 590.666832] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.406s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.668435] env[65758]: INFO nova.compute.claims [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.854848] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "refresh_cache-549673ec-3d75-4aad-a001-014f3f53a6b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.855597] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquired lock "refresh_cache-549673ec-3d75-4aad-a001-014f3f53a6b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 590.855597] env[65758]: DEBUG nova.network.neutron [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 591.116115] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659805, 'name': CreateVM_Task, 'duration_secs': 0.287664} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.116308] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 591.117544] env[65758]: DEBUG oslo_vmware.service [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7720333a-369e-4a79-a974-87ccae5b9c4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.125637] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.125885] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.126604] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 591.126922] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2097aa75-c218-467f-9788-a5e9a6456082 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.134385] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 591.134385] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8d840-bf89-488b-97a0-e1f6a288a04e" [ 591.134385] env[65758]: _type = "Task" [ 591.134385] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.140891] env[65758]: WARNING neutronclient.v2_0.client [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
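The VM-creation sequence just above (Folder.CreateFolder for the OpenStack, Project and Instances folders, Folder.CreateVM_Task, the 'Waiting for the task: ... task-4659805' block with its 'progress is 0%' poll and the completion record reporting duration_secs 0.287664, then the SearchDatastore_Task calls against the image cache) follows oslo.vmware's usual call pattern: invoke_api() issues the SOAP request that appears as 'Invoking Object.Method with opID=oslo.vmware-...', and wait_for_task() polls the returned task until vCenter reports it finished. A minimal sketch under those assumptions; the host, credentials and the make_task callable are placeholders, not values from this deployment:

from oslo_vmware import api


def run_vcenter_task(host, user, password, make_task):
    # Open a vSphere API session; the retry/poll settings are illustrative.
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    # make_task is assumed to start a vCenter task via invoke_api(), e.g.
    #   session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
    #                      config=config_spec, pool=respool_ref)
    # and to return the task's managed-object reference.
    task_ref = make_task(session)
    # wait_for_task() polls the task (the "progress is 0%." records above)
    # and returns the task info once vCenter reports success, raising if the
    # task ends in an error state.
    return session.wait_for_task(task_ref)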
[ 591.143417] env[65758]: WARNING openstack [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 591.143417] env[65758]: WARNING openstack [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 591.161555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.161795] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.162025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.162162] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.162984] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 591.163270] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a918941c-0584-469c-907e-efac8dc25fa7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.173436] env[65758]: DEBUG nova.compute.utils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 591.177360] env[65758]: DEBUG nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 591.177557] env[65758]: DEBUG nova.network.neutron [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 591.177882] env[65758]: WARNING neutronclient.v2_0.client [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 591.178296] env[65758]: WARNING neutronclient.v2_0.client [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 591.178874] env[65758]: WARNING openstack [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 591.179238] env[65758]: WARNING openstack [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 591.187652] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 591.187826] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 591.188692] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c59e30-ee11-4a9a-bd64-7fb88874470e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.197957] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e40725c-eb53-49ae-a83c-1787eff206d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.203581] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 591.203581] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52750246-4d5f-a9f9-ba66-28fa4ca90bbb" [ 591.203581] env[65758]: _type = "Task" [ 591.203581] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.215553] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52750246-4d5f-a9f9-ba66-28fa4ca90bbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.310983] env[65758]: DEBUG nova.network.neutron [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Updating instance_info_cache with network_info: [{"id": "bf0bd4f9-a022-486a-96ba-e2c684bfa941", "address": "fa:16:3e:a3:16:62", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf0bd4f9-a0", "ovs_interfaceid": "bf0bd4f9-a022-486a-96ba-e2c684bfa941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 591.359952] env[65758]: WARNING openstack [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: 
oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 591.362204] env[65758]: WARNING openstack [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 591.375958] env[65758]: DEBUG nova.policy [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7df5b8985cc546e284ffe0b758eb58b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fd9b52de8d045e2b42b8646ea659584', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 591.678658] env[65758]: DEBUG nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 591.729408] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Preparing fetch location {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 591.729983] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Creating directory with path [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 591.730496] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-124a8702-f2be-4da8-a8e4-e1d87a753f46 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.759035] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Created directory with path [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 591.760138] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Fetch image to [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk {{(pid=65758) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 591.760138] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Downloading image file data 75a6399b-5100-4c51-b5cf-162bd505a28f to [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk on the data store datastore2 {{(pid=65758) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 591.760309] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c757f6-f23d-4975-8603-66cf3f0d6221 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.773514] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcf6a7d-ab44-4d7f-91e6-d7f40312eca3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.781317] env[65758]: DEBUG nova.network.neutron [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 591.795072] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6701147-3631-40c2-b12d-24075fa482e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.828008] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Releasing lock "refresh_cache-67fdb417-62ea-412c-8b82-868d59149f89" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.828378] env[65758]: DEBUG nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Instance network_info: |[{"id": "bf0bd4f9-a022-486a-96ba-e2c684bfa941", "address": "fa:16:3e:a3:16:62", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf0bd4f9-a0", "ovs_interfaceid": "bf0bd4f9-a022-486a-96ba-e2c684bfa941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 591.832499] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:16:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf0bd4f9-a022-486a-96ba-e2c684bfa941', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 591.840727] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Creating folder: Project (aab1df827abb49b88b951d30ba485d39). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 591.841584] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c3862a-187e-4f10-9c63-4abe5707bf9b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.844840] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e92beb23-361d-4c34-aa8a-45f50a29dc80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.852292] env[65758]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7f3806cb-6ae8-40b9-8759-0ec9dd7cb245 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.869478] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Created folder: Project (aab1df827abb49b88b951d30ba485d39) in parent group-v909763. [ 591.872282] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Creating folder: Instances. Parent ref: group-v909767. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 591.872282] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e53cf3d-29dc-485c-aa47-d8fc1166a4df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.882590] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Created folder: Instances in parent group-v909767. [ 591.882830] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 591.883132] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 591.883277] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c99c0d74-5faf-4d21-b4ac-10b1c0c72ce7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.902834] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Downloading image file data 75a6399b-5100-4c51-b5cf-162bd505a28f to the data store datastore2 {{(pid=65758) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 591.910981] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 591.910981] env[65758]: value = "task-4659808" [ 591.910981] env[65758]: _type = "Task" [ 591.910981] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.919868] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659808, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.923400] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119ce794-e88b-4ca7-8922-77a09771494a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.935419] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9162fc34-57e0-4887-9656-99236147399b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.973488] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1965886-906a-4ff3-ab1e-be1023ec01da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.984263] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35570a5e-46fc-4606-b9ae-fb658228309e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.001018] env[65758]: DEBUG nova.compute.provider_tree [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.016895] env[65758]: DEBUG nova.network.neutron [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Successfully created port: af073ff0-f4c1-43b4-bf05-beb5e71db8ac {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 592.019467] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 
tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.019624] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.040100] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=65758) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 592.425040] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659808, 'name': CreateVM_Task, 'duration_secs': 0.397289} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.425040] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 592.425552] env[65758]: WARNING neutronclient.v2_0.client [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 592.507681] env[65758]: DEBUG nova.scheduler.client.report [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 592.523275] env[65758]: DEBUG nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 592.689870] env[65758]: DEBUG nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 592.728965] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 592.728965] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 592.729317] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 592.729317] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 592.729436] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 592.729577] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 592.729809] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.729986] env[65758]: 
DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 592.730241] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 592.730431] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 592.730617] env[65758]: DEBUG nova.virt.hardware [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 592.731566] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8821195a-c0b2-450a-98d5-4b4a2681b1d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.744030] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5280ef01-30cf-4216-aa6f-fccf8a28a38b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.895038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.895038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.895038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 592.895038] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9d23e6e-7569-41fd-acc0-fccaa3ca9a1c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.909722] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 
tempest-ServerDiagnosticsV248Test-865679339-project-member] Completed reading data from the image iterator. {{(pid=65758) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 592.909957] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 592.911776] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 592.911776] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521e7435-fd1c-5fdf-51e7-0d8ffdb23946" [ 592.911776] env[65758]: _type = "Task" [ 592.911776] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.922225] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.922465] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.922674] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.970614] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Downloaded image file data 75a6399b-5100-4c51-b5cf-162bd505a28f to vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk on the data store datastore2 {{(pid=65758) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 592.972181] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Caching image {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 592.972428] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc 
tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Copying Virtual Disk [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk to [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 592.972707] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb6fce3f-1e20-4fe6-9121-fb5999bad745 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.982027] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 592.982027] env[65758]: value = "task-4659809" [ 592.982027] env[65758]: _type = "Task" [ 592.982027] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.991641] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659809, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.015748] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.349s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.016959] env[65758]: DEBUG nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 593.022114] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.584s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.022114] env[65758]: INFO nova.compute.claims [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.060156] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.493920] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659809, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.527652] env[65758]: DEBUG nova.compute.utils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 593.532313] env[65758]: DEBUG nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 593.532313] env[65758]: DEBUG nova.network.neutron [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 593.532313] env[65758]: WARNING neutronclient.v2_0.client [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 593.532313] env[65758]: WARNING neutronclient.v2_0.client [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
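The recurring "Disabling service 'block-storage' ... no such option valid_interfaces in group [cinder]" (and the matching [barbican]) warnings above come from openstacksdk reading a config option that nothing has registered with oslo.config. A hedged illustration of that failure mode follows; it is not the SDK's code, only a standalone reproduction of the exception, with the group and option names mirrored from the log and registration shown as one possible remedy:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup("cinder"))

try:
    # Reading an option that was never registered raises NoSuchOptError,
    # which is the text quoted in the warnings above.
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print("logged as a warning:", exc)  # no such option valid_interfaces in group [cinder]

# Once the option is registered (keystoneauth adapter options normally do this),
# the same read returns its default instead of raising.
conf.register_opts(
    [cfg.ListOpt("valid_interfaces", default=["internal", "public"])],
    group="cinder",
)
print(conf.cinder.valid_interfaces)  # ['internal', 'public']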
[ 593.532313] env[65758]: WARNING openstack [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 593.532796] env[65758]: WARNING openstack [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 593.975160] env[65758]: DEBUG nova.network.neutron [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Successfully updated port: af073ff0-f4c1-43b4-bf05-beb5e71db8ac {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 593.994497] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659809, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.036644] env[65758]: WARNING neutronclient.v2_0.client [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 594.037805] env[65758]: WARNING openstack [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 594.037805] env[65758]: WARNING openstack [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 594.052652] env[65758]: DEBUG nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 594.234023] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1875f77-62dd-4252-87dd-cc99f6de8ed7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.241597] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7d7cf1-1737-44dc-a9a9-73d8bea202d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.277158] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c808e9-63b0-46a6-ae16-515bc44e6415 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.285719] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca58b81-9d08-47e3-89ef-98e9818c1519 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.303278] env[65758]: DEBUG nova.compute.provider_tree [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.478061] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquiring lock "refresh_cache-a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.478315] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquired lock "refresh_cache-a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.479164] env[65758]: DEBUG nova.network.neutron [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 594.496127] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659809, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.499735] env[65758]: DEBUG nova.policy [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3306c44df27a4f9a900befe5d2ae406a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57c8e01c5bb14062ae9c179e013a1c1e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 594.512865] env[65758]: DEBUG nova.network.neutron [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Updating instance_info_cache with network_info: [{"id": "3d50d517-3f1a-4b04-a81d-54672953d4c6", "address": "fa:16:3e:6b:10:71", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d50d517-3f", "ovs_interfaceid": "3d50d517-3f1a-4b04-a81d-54672953d4c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 594.809672] env[65758]: DEBUG nova.scheduler.client.report [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 594.983959] env[65758]: WARNING openstack [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: 
oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 594.983959] env[65758]: WARNING openstack [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 594.993302] env[65758]: DEBUG nova.network.neutron [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Successfully created port: 83c16429-d108-4a97-84ec-81e4398f9881 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 595.006236] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659809, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.722391} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.006632] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Copied Virtual Disk [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk to [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 595.006799] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Deleting the datastore file [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 595.007097] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-358ff8bd-6b0e-475a-b958-9936ebe0ab91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.016283] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Releasing lock "refresh_cache-549673ec-3d75-4aad-a001-014f3f53a6b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.017208] env[65758]: DEBUG nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Instance network_info: |[{"id": "3d50d517-3f1a-4b04-a81d-54672953d4c6", "address": "fa:16:3e:6b:10:71", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": 
{"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d50d517-3f", "ovs_interfaceid": "3d50d517-3f1a-4b04-a81d-54672953d4c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 595.018446] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 595.018446] env[65758]: value = "task-4659810" [ 595.018446] env[65758]: _type = "Task" [ 595.018446] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.018709] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:10:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d50d517-3f1a-4b04-a81d-54672953d4c6', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 595.026923] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Creating folder: Project (d999e5f3384e4a24ad9ec68b2fa3fda7). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.027702] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae0e84d3-fa1d-413a-b9da-401cd2f848cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.039752] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659810, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.045479] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Created folder: Project (d999e5f3384e4a24ad9ec68b2fa3fda7) in parent group-v909763. 
[ 595.045686] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Creating folder: Instances. Parent ref: group-v909770. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.045994] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d074505a-778f-4a71-ad43-a12b9a8c5836 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.058567] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Created folder: Instances in parent group-v909770. [ 595.058567] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 595.058567] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 595.058567] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af1528dc-560f-4dc2-aa78-2f71038487e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.075797] env[65758]: DEBUG nova.network.neutron [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 595.079063] env[65758]: DEBUG nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 595.087938] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 595.087938] env[65758]: value = "task-4659813" [ 595.087938] env[65758]: _type = "Task" [ 595.087938] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.098661] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659813, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.111236] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 595.111528] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 595.111528] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 595.111662] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 595.111845] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 595.112010] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 595.112260] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.113042] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 595.113042] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 595.113042] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 595.113042] env[65758]: DEBUG nova.virt.hardware [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 595.114137] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebc0e3b-326f-427b-b713-d807a7c03c4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.123502] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b14332-6411-43a3-9eeb-e1daa36604a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.318875] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.319546] env[65758]: DEBUG nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 595.322979] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.329s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.323215] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.323409] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 595.323703] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.599s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.325168] env[65758]: INFO nova.compute.claims [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.328715] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe771b0-9c49-45b9-9c82-0da32447b6af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.341172] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f74285b-1b15-4b4a-84b2-37d26f2d67f2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.360334] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b989dd38-7edf-4a65-9765-1593d7bd4ac8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.374132] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1496a6-3622-4cd8-9b4c-4f93b798e33a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.410854] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180603MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 595.411019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.542828] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659810, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030361} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.543182] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 595.543182] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Moving file from [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573/75a6399b-5100-4c51-b5cf-162bd505a28f to [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f. {{(pid=65758) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 595.544179] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-600ebeb1-1a40-4a45-bcb3-2bff1aac419b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.553157] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 595.553157] env[65758]: value = "task-4659814" [ 595.553157] env[65758]: _type = "Task" [ 595.553157] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.567304] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659814, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.605902] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659813, 'name': CreateVM_Task, 'duration_secs': 0.380936} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.606837] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 595.606837] env[65758]: WARNING neutronclient.v2_0.client [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 595.607058] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.607117] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.608023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 595.608023] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87241e92-e01d-4490-9182-2296769da6b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.616428] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 595.616428] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8ab65-1b29-d0df-8598-6f626de03c61" [ 595.616428] env[65758]: _type = "Task" [ 595.616428] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.628627] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8ab65-1b29-d0df-8598-6f626de03c61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.666290] env[65758]: WARNING neutronclient.v2_0.client [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 595.666956] env[65758]: WARNING openstack [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 595.667324] env[65758]: WARNING openstack [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 595.768796] env[65758]: DEBUG nova.network.neutron [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Updating instance_info_cache with network_info: [{"id": "af073ff0-f4c1-43b4-bf05-beb5e71db8ac", "address": "fa:16:3e:6e:d0:9d", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf073ff0-f4", "ovs_interfaceid": "af073ff0-f4c1-43b4-bf05-beb5e71db8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 595.832690] env[65758]: DEBUG nova.compute.utils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 595.837055] env[65758]: DEBUG nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 595.837362] env[65758]: DEBUG nova.network.neutron [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 595.837724] env[65758]: WARNING neutronclient.v2_0.client [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 595.839065] env[65758]: WARNING neutronclient.v2_0.client [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 595.839065] env[65758]: WARNING openstack [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 595.839291] env[65758]: WARNING openstack [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 595.886931] env[65758]: DEBUG nova.policy [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d2e4ce13d6043768cbc1ac8395f2f08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5af9eac7501241d68c48c140efa1a19b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.066050] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659814, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.032432} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.066050] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] File moved {{(pid=65758) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 596.066050] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Cleaning up location [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573 {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 596.066050] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Deleting the datastore file [datastore2] vmware_temp/3b539f06-1f0f-453d-8f79-d1fdbf414573 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 596.066597] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f835c2b7-8170-4aa4-88ee-9bbf3cbffdf3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.075449] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 596.075449] env[65758]: value = "task-4659815" [ 596.075449] env[65758]: _type = "Task" [ 596.075449] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.087325] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.129217] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8ab65-1b29-d0df-8598-6f626de03c61, 'name': SearchDatastore_Task, 'duration_secs': 0.009559} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.129391] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.129621] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.129906] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.141340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquiring lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.141946] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.191780] env[65758]: DEBUG nova.compute.manager [req-e6e29cec-8bad-4ef4-bf2e-d8accd9f8561 req-9efe949d-16b1-4ca6-84e2-29be2e571a0d service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Received event network-vif-plugged-bf0bd4f9-a022-486a-96ba-e2c684bfa941 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 596.192093] env[65758]: DEBUG oslo_concurrency.lockutils [req-e6e29cec-8bad-4ef4-bf2e-d8accd9f8561 req-9efe949d-16b1-4ca6-84e2-29be2e571a0d service nova] Acquiring lock "67fdb417-62ea-412c-8b82-868d59149f89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.192355] env[65758]: DEBUG oslo_concurrency.lockutils [req-e6e29cec-8bad-4ef4-bf2e-d8accd9f8561 req-9efe949d-16b1-4ca6-84e2-29be2e571a0d service nova] Lock "67fdb417-62ea-412c-8b82-868d59149f89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.192651] env[65758]: DEBUG oslo_concurrency.lockutils 
[req-e6e29cec-8bad-4ef4-bf2e-d8accd9f8561 req-9efe949d-16b1-4ca6-84e2-29be2e571a0d service nova] Lock "67fdb417-62ea-412c-8b82-868d59149f89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.193709] env[65758]: DEBUG nova.compute.manager [req-e6e29cec-8bad-4ef4-bf2e-d8accd9f8561 req-9efe949d-16b1-4ca6-84e2-29be2e571a0d service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] No waiting events found dispatching network-vif-plugged-bf0bd4f9-a022-486a-96ba-e2c684bfa941 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 596.193709] env[65758]: WARNING nova.compute.manager [req-e6e29cec-8bad-4ef4-bf2e-d8accd9f8561 req-9efe949d-16b1-4ca6-84e2-29be2e571a0d service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Received unexpected event network-vif-plugged-bf0bd4f9-a022-486a-96ba-e2c684bfa941 for instance with vm_state building and task_state spawning. [ 596.273592] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Releasing lock "refresh_cache-a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.276127] env[65758]: DEBUG nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Instance network_info: |[{"id": "af073ff0-f4c1-43b4-bf05-beb5e71db8ac", "address": "fa:16:3e:6e:d0:9d", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf073ff0-f4", "ovs_interfaceid": "af073ff0-f4c1-43b4-bf05-beb5e71db8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 596.276226] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:d0:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af073ff0-f4c1-43b4-bf05-beb5e71db8ac', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 596.285646] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Creating folder: Project (3fd9b52de8d045e2b42b8646ea659584). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.285959] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf331265-5c08-4a2c-b74d-23d622d70c05 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.304321] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Created folder: Project (3fd9b52de8d045e2b42b8646ea659584) in parent group-v909763. [ 596.304321] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Creating folder: Instances. Parent ref: group-v909773. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.304528] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0f3beab-e7ae-4a9e-ba20-18d0ba2b8956 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.319623] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Created folder: Instances in parent group-v909773. [ 596.319921] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 596.320262] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 596.320435] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e26c342-0999-4761-a403-1d224b787407 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.337329] env[65758]: DEBUG nova.network.neutron [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Successfully created port: f3319916-956f-49ba-9da5-ad0df9c5953c {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 596.347154] env[65758]: DEBUG nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 596.353626] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 596.353626] env[65758]: value = "task-4659818" [ 596.353626] env[65758]: _type = "Task" [ 596.353626] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.374833] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659818, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.380596] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "37aadd44-79e8-4479-862f-265549c9d802" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.380932] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "37aadd44-79e8-4479-862f-265549c9d802" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.588344] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029627} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.588829] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 596.589459] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72ee122d-e7ee-48e8-9b40-c2a260ea8523 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.602065] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 596.602065] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527e414b-7ea0-2bfc-cd0b-6bc76d4c0a38" [ 596.602065] env[65758]: _type = "Task" [ 596.602065] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.612037] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527e414b-7ea0-2bfc-cd0b-6bc76d4c0a38, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.621976] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae614c5-ff9d-410b-89d9-9f3698822f86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.632119] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e268dd71-27a5-4645-a997-bdac0f08355e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.666819] env[65758]: DEBUG nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 596.670017] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802cfdf1-b7f4-4f71-b1a1-2f3ba6fdc53f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.678981] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62869473-b1f1-4d94-8edd-4750305a8184 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.693700] env[65758]: DEBUG nova.compute.provider_tree [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 596.860571] env[65758]: DEBUG nova.network.neutron [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Successfully updated port: 83c16429-d108-4a97-84ec-81e4398f9881 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 596.873851] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659818, 'name': CreateVM_Task, 'duration_secs': 0.389839} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.874656] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 596.875233] env[65758]: WARNING neutronclient.v2_0.client [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 596.876024] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.876024] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.876136] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 596.876667] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-043ac53c-1563-4d36-a8f3-676c262d14b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.884899] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 596.884899] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524ef0f8-0cf3-ca6f-2c16-8d744a2e574b" [ 596.884899] env[65758]: _type = "Task" [ 596.884899] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.887715] env[65758]: DEBUG nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 596.897019] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524ef0f8-0cf3-ca6f-2c16-8d744a2e574b, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.897019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.897019] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.897019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.113288] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527e414b-7ea0-2bfc-cd0b-6bc76d4c0a38, 'name': SearchDatastore_Task, 'duration_secs': 0.010505} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.113627] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.113970] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 9e16d31b-e84c-448b-9d83-98cac49570a0/9e16d31b-e84c-448b-9d83-98cac49570a0.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.114266] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.114441] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.114649] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31d6f5b4-9561-4316-bfcb-828166a71075 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.116766] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a307218b-4521-4513-80d7-22e49fc35177 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.126073] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 597.126073] env[65758]: value = "task-4659819" [ 597.126073] env[65758]: _type = "Task" [ 597.126073] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.127253] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.127469] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.131055] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f88f1f2c-24ce-485f-b28c-71e9b446c055 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.138645] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659819, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.140028] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 597.140028] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52004b23-3ba3-4115-f2c9-c76258816863" [ 597.140028] env[65758]: _type = "Task" [ 597.140028] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.148835] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52004b23-3ba3-4115-f2c9-c76258816863, 'name': SearchDatastore_Task, 'duration_secs': 0.008722} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.149614] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fed8348-5d02-4e54-81a9-178fe69f739e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.155586] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 597.155586] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5208d4df-dbe4-b7d2-3103-5e5637e75d77" [ 597.155586] env[65758]: _type = "Task" [ 597.155586] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.166605] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5208d4df-dbe4-b7d2-3103-5e5637e75d77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.198321] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.222117] env[65758]: ERROR nova.scheduler.client.report [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [req-8f1ffa16-fe10-431a-9896-bf5850f58f1f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8f1ffa16-fe10-431a-9896-bf5850f58f1f"}]} [ 597.248203] env[65758]: DEBUG nova.scheduler.client.report [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 597.274433] env[65758]: DEBUG nova.scheduler.client.report [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 597.274719] env[65758]: DEBUG nova.compute.provider_tree [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 597.298206] env[65758]: DEBUG nova.scheduler.client.report [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: aac11ce6-0e88-4314-bbd0-8e388ff0a87c {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 597.328392] env[65758]: DEBUG nova.scheduler.client.report [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 597.369665] env[65758]: DEBUG nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 597.370888] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquiring lock "refresh_cache-e4540963-7be9-426e-90f8-b31524d2237b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.371067] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquired lock "refresh_cache-e4540963-7be9-426e-90f8-b31524d2237b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.371242] env[65758]: DEBUG nova.network.neutron [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 597.414650] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 597.415026] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 597.415132] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 597.415273] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 597.415435] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Image pref 0:0:0 
{{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 597.415571] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 597.416482] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.416482] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 597.416482] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 597.416482] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 597.416482] env[65758]: DEBUG nova.virt.hardware [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 597.417730] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab081d9b-bdd4-4415-b69f-f694ca0097c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.428203] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd110431-4e52-4110-b7b5-be35abcbf0b4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.447810] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.613378] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9abf39f-cb33-4e6b-beb1-da232af68a81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
597.625562] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60ee3bc-1f60-44bd-8c01-4d13b1f3c260 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.644968] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659819, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.686755] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e9a8bd-db2a-4e76-ac9e-29f547124636 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.699590] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8ebca4-a567-4dc2-acee-6f2539bdccb4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.703974] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5208d4df-dbe4-b7d2-3103-5e5637e75d77, 'name': SearchDatastore_Task, 'duration_secs': 0.008417} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.704280] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.704554] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 67fdb417-62ea-412c-8b82-868d59149f89/67fdb417-62ea-412c-8b82-868d59149f89.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.705220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.705414] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.706220] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5156681e-afc7-4c2d-af35-98cb0bcd4f04 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.715570] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1eef1a14-f8a2-4d4a-b941-8b279828f1a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.718370] env[65758]: DEBUG nova.compute.provider_tree [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 597.727051] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 597.727051] env[65758]: value = "task-4659820" [ 597.727051] env[65758]: _type = "Task" [ 597.727051] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.728401] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.728457] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.733841] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3b16b03-9414-4a6e-b9ed-5f1d2b142292 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.742241] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 597.742241] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52faa53b-c15f-e3a2-f240-dec6c5b8dcf9" [ 597.742241] env[65758]: _type = "Task" [ 597.742241] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.746321] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659820, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.756910] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52faa53b-c15f-e3a2-f240-dec6c5b8dcf9, 'name': SearchDatastore_Task, 'duration_secs': 0.008984} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.757782] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a02c8fd1-e953-46f2-8365-9e3b0893ef70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.764304] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 597.764304] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5273486c-a2ee-20e8-539d-17a3b7269870" [ 597.764304] env[65758]: _type = "Task" [ 597.764304] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.777115] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5273486c-a2ee-20e8-539d-17a3b7269870, 'name': SearchDatastore_Task} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.780537] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.780891] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 549673ec-3d75-4aad-a001-014f3f53a6b0/549673ec-3d75-4aad-a001-014f3f53a6b0.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.781436] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.781674] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.781905] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8913820a-4c49-4165-9535-428c50f65b2e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.784267] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b598135c-3962-47a9-9e59-50fbbb7c7b41 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.794399] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 597.794399] env[65758]: value = "task-4659821" [ 597.794399] env[65758]: _type = "Task" [ 597.794399] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.795855] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.796042] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.800210] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58ce5418-206a-421e-9ea8-d6d04ffca375 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.810474] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.812067] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 597.812067] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c3acc5-b258-2621-b233-79f18c431aef" [ 597.812067] env[65758]: _type = "Task" [ 597.812067] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.821662] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c3acc5-b258-2621-b233-79f18c431aef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.876891] env[65758]: WARNING openstack [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 597.877635] env[65758]: WARNING openstack [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 598.126672] env[65758]: DEBUG nova.network.neutron [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Successfully updated port: f3319916-956f-49ba-9da5-ad0df9c5953c {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 598.143944] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659819, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517612} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.145363] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 9e16d31b-e84c-448b-9d83-98cac49570a0/9e16d31b-e84c-448b-9d83-98cac49570a0.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.146194] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.149232] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f24edda3-d268-4ff1-9faa-ee2a0140afb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.161405] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 598.161405] env[65758]: value = "task-4659822" [ 598.161405] env[65758]: _type = "Task" [ 598.161405] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.175577] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659822, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.194347] env[65758]: DEBUG nova.network.neutron [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 598.239448] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659820, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486811} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.239523] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 67fdb417-62ea-412c-8b82-868d59149f89/67fdb417-62ea-412c-8b82-868d59149f89.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.239715] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.240194] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca8821ae-df62-4bbd-8508-72befb529e8a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.248606] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 598.248606] env[65758]: value = "task-4659823" [ 598.248606] env[65758]: _type = "Task" [ 598.248606] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.260549] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659823, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.275545] env[65758]: DEBUG nova.scheduler.client.report [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 13 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 598.275545] env[65758]: DEBUG nova.compute.provider_tree [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 13 to 14 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 598.275920] env[65758]: DEBUG nova.compute.provider_tree [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 598.311825] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659821, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.327619] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c3acc5-b258-2621-b233-79f18c431aef, 'name': SearchDatastore_Task, 'duration_secs': 0.022819} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.329025] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02614fa5-dd38-4c11-83c0-d225fb9b5234 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.337981] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 598.337981] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522c71a3-39e9-a32e-ea5d-3ca58c7044d8" [ 598.337981] env[65758]: _type = "Task" [ 598.337981] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.349460] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522c71a3-39e9-a32e-ea5d-3ca58c7044d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.597033] env[65758]: WARNING neutronclient.v2_0.client [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 598.597467] env[65758]: WARNING openstack [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 598.597786] env[65758]: WARNING openstack [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 598.630497] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquiring lock "refresh_cache-f1a1650b-4c45-47fc-9c45-f4625c959597" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.630497] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquired lock "refresh_cache-f1a1650b-4c45-47fc-9c45-f4625c959597" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.631798] env[65758]: DEBUG nova.network.neutron [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a 
tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 598.674012] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.216485} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.674752] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 598.675665] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4110f2c-c0bf-446e-84aa-21cc48e2301d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.705108] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 9e16d31b-e84c-448b-9d83-98cac49570a0/9e16d31b-e84c-448b-9d83-98cac49570a0.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 598.707849] env[65758]: DEBUG nova.network.neutron [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Updating instance_info_cache with network_info: [{"id": "83c16429-d108-4a97-84ec-81e4398f9881", "address": "fa:16:3e:f5:f8:b7", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c16429-d1", "ovs_interfaceid": "83c16429-d108-4a97-84ec-81e4398f9881", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 598.709543] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-049cf2ee-ef8a-46d6-80fb-92438bf1a66e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.726476] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Releasing lock "refresh_cache-e4540963-7be9-426e-90f8-b31524d2237b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.726853] env[65758]: DEBUG nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Instance network_info: |[{"id": "83c16429-d108-4a97-84ec-81e4398f9881", "address": "fa:16:3e:f5:f8:b7", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c16429-d1", "ovs_interfaceid": "83c16429-d108-4a97-84ec-81e4398f9881", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 598.727801] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:f8:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83c16429-d108-4a97-84ec-81e4398f9881', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.735976] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Creating folder: Project (57c8e01c5bb14062ae9c179e013a1c1e). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.737353] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa590c11-bec6-4197-a864-b7d4a90df8cd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.743086] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 598.743086] env[65758]: value = "task-4659824" [ 598.743086] env[65758]: _type = "Task" [ 598.743086] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.754269] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Created folder: Project (57c8e01c5bb14062ae9c179e013a1c1e) in parent group-v909763. [ 598.754550] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Creating folder: Instances. Parent ref: group-v909776. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.758761] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de358290-01a4-4a3e-9f09-d42ddf8dbc7e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.760737] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659824, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.768722] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659823, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10263} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.768722] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 598.768722] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9aa7b91-9da1-46fe-b4ec-a58f0ee2dfc7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.786108] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.462s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.786774] env[65758]: DEBUG nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 598.800285] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 67fdb417-62ea-412c-8b82-868d59149f89/67fdb417-62ea-412c-8b82-868d59149f89.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 598.801688] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.741s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.803279] env[65758]: INFO nova.compute.claims [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.806250] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7e192fb-3810-4641-af6c-871dbc0ee4f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.821919] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Created folder: Instances in parent group-v909776. 
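The interleaved "Waiting for the task", "progress is N%" and "completed successfully" entries around this point come from the compute driver polling vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task) until they finish. The sketch below is only a generic illustration of that polling pattern, not the oslo.vmware `wait_for_task`/`_poll_task` implementation; `fetch_task_info` is a hypothetical callable standing in for the vSphere task-info lookup.

```python
# Generic task-polling sketch (illustrative only; not the oslo.vmware code).
import time


class TaskFailed(Exception):
    """Raised when the remote task ends in an error state."""


def wait_for_task(fetch_task_info, task_ref, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_ref)  # e.g. {"state": "running", "progress": 40}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # Corresponds to the "progress is N%" DEBUG lines in the log above.
        print(f"Task {task_ref}: progress {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")


# Example with a fake task that finishes after a few polls.
_polls = iter([
    {"state": "running", "progress": 0},
    {"state": "running", "progress": 100},
    {"state": "success", "result": "task-0000001"},
])
print(wait_for_task(lambda ref: next(_polls), "task-0000001", poll_interval=0.01))
```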
[ 598.822209] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 598.826529] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 598.827301] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02848a5d-8a0e-4aff-abf8-2142418c2cfe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.846469] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 598.846469] env[65758]: value = "task-4659827" [ 598.846469] env[65758]: _type = "Task" [ 598.846469] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.859427] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.736166} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.862223] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 549673ec-3d75-4aad-a001-014f3f53a6b0/549673ec-3d75-4aad-a001-014f3f53a6b0.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.864441] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.864441] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80ef8021-2e07-4e28-a491-8311735b6de9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.876561] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659827, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.876775] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522c71a3-39e9-a32e-ea5d-3ca58c7044d8, 'name': SearchDatastore_Task, 'duration_secs': 0.054528} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.877088] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.877088] env[65758]: value = "task-4659828" [ 598.877088] env[65758]: _type = "Task" [ 598.877088] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.878671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.878847] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974/a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 598.879706] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da3ccfbe-2e75-4ce0-9a29-616ab6467913 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.894711] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659828, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.898576] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 598.898576] env[65758]: value = "task-4659829" [ 598.898576] env[65758]: _type = "Task" [ 598.898576] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.898944] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 598.898944] env[65758]: value = "task-4659830" [ 598.898944] env[65758]: _type = "Task" [ 598.898944] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.915527] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659829, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.920723] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659830, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.135525] env[65758]: WARNING openstack [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 599.135934] env[65758]: WARNING openstack [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 599.180903] env[65758]: DEBUG nova.network.neutron [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 599.258423] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659824, 'name': ReconfigVM_Task, 'duration_secs': 0.428877} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.258877] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 9e16d31b-e84c-448b-9d83-98cac49570a0/9e16d31b-e84c-448b-9d83-98cac49570a0.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 599.260159] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7898101-1521-4daf-9b3e-e60b1e1e4cd1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.273193] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 599.273193] env[65758]: value = "task-4659831" [ 599.273193] env[65758]: _type = "Task" [ 599.273193] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.292173] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659831, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.304713] env[65758]: DEBUG nova.compute.utils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 599.311743] env[65758]: DEBUG nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 599.312014] env[65758]: DEBUG nova.network.neutron [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 599.315262] env[65758]: WARNING neutronclient.v2_0.client [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 599.316141] env[65758]: WARNING neutronclient.v2_0.client [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 599.316928] env[65758]: WARNING openstack [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 599.317299] env[65758]: WARNING openstack [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 599.326937] env[65758]: WARNING neutronclient.v2_0.client [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 599.327865] env[65758]: WARNING openstack [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 599.328233] env[65758]: WARNING openstack [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 599.369223] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659827, 'name': ReconfigVM_Task, 'duration_secs': 0.53012} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.369223] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 67fdb417-62ea-412c-8b82-868d59149f89/67fdb417-62ea-412c-8b82-868d59149f89.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 599.369223] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8239a65e-902b-4190-b4ca-99d7b44898f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.378969] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 599.378969] env[65758]: value = "task-4659832" [ 599.378969] env[65758]: _type = "Task" [ 599.378969] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.401904] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659832, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.404614] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659828, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.424633] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080925} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.424808] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659829, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.425081] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 599.425936] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81914331-a5dc-41c3-9b04-93d11b666f6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.454200] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 549673ec-3d75-4aad-a001-014f3f53a6b0/549673ec-3d75-4aad-a001-014f3f53a6b0.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 599.454530] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e8f5850-cfa8-4fa0-8f44-e021d0332cc2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.475851] env[65758]: DEBUG nova.network.neutron [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Updating instance_info_cache with network_info: [{"id": "f3319916-956f-49ba-9da5-ad0df9c5953c", "address": "fa:16:3e:16:f7:af", "network": {"id": "eb6c4afb-5353-490f-877a-96738c4fa9e3", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1051072804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5af9eac7501241d68c48c140efa1a19b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41f66e20-fd86-4158-bbdc-7a150e85e844", "external-id": "nsx-vlan-transportzone-182", "segmentation_id": 182, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3319916-95", "ovs_interfaceid": "f3319916-956f-49ba-9da5-ad0df9c5953c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 599.478924] env[65758]: DEBUG 
oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 599.478924] env[65758]: value = "task-4659833" [ 599.478924] env[65758]: _type = "Task" [ 599.478924] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.486812] env[65758]: DEBUG nova.policy [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f16c6fa73284e8696df370f862e6366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdaabf2897064b5a948dbdb6d5921d76', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 599.499276] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquiring lock "e48a075b-41b3-4612-bd5f-0a158d707a2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.499534] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "e48a075b-41b3-4612-bd5f-0a158d707a2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.499790] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659833, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.785500] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659831, 'name': Rename_Task, 'duration_secs': 0.214376} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.786607] env[65758]: DEBUG nova.compute.manager [req-9198013d-011b-4942-b983-e180cd198f2b req-bca19ed1-8d85-49fd-b974-6ad03dc79994 service nova] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Received event network-vif-plugged-83c16429-d108-4a97-84ec-81e4398f9881 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 599.787791] env[65758]: DEBUG oslo_concurrency.lockutils [req-9198013d-011b-4942-b983-e180cd198f2b req-bca19ed1-8d85-49fd-b974-6ad03dc79994 service nova] Acquiring lock "e4540963-7be9-426e-90f8-b31524d2237b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.787791] env[65758]: DEBUG oslo_concurrency.lockutils [req-9198013d-011b-4942-b983-e180cd198f2b req-bca19ed1-8d85-49fd-b974-6ad03dc79994 service nova] Lock "e4540963-7be9-426e-90f8-b31524d2237b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.787791] env[65758]: DEBUG oslo_concurrency.lockutils [req-9198013d-011b-4942-b983-e180cd198f2b req-bca19ed1-8d85-49fd-b974-6ad03dc79994 service nova] Lock "e4540963-7be9-426e-90f8-b31524d2237b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.787791] env[65758]: DEBUG nova.compute.manager [req-9198013d-011b-4942-b983-e180cd198f2b req-bca19ed1-8d85-49fd-b974-6ad03dc79994 service nova] [instance: e4540963-7be9-426e-90f8-b31524d2237b] No waiting events found dispatching network-vif-plugged-83c16429-d108-4a97-84ec-81e4398f9881 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 599.787791] env[65758]: WARNING nova.compute.manager [req-9198013d-011b-4942-b983-e180cd198f2b req-bca19ed1-8d85-49fd-b974-6ad03dc79994 service nova] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Received unexpected event network-vif-plugged-83c16429-d108-4a97-84ec-81e4398f9881 for instance with vm_state building and task_state spawning. [ 599.788023] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 599.788450] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f98e00b3-f284-495a-8cf7-643ce03ca15c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.799460] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 599.799460] env[65758]: value = "task-4659834" [ 599.799460] env[65758]: _type = "Task" [ 599.799460] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.814651] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659834, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.839182] env[65758]: DEBUG nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 599.920726] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659828, 'name': CreateVM_Task, 'duration_secs': 0.70828} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.920948] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659832, 'name': Rename_Task, 'duration_secs': 0.224076} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.924792] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 599.925132] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 599.929104] env[65758]: WARNING neutronclient.v2_0.client [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 599.929547] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.929701] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.930038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 599.930322] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56c9f8ee-218f-4f3b-96ff-7defd25ea357 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.935741] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1449d50-c89c-4bde-a25d-fd8f13c72300 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.937915] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678478} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.940842] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974/a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 599.941053] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 599.941811] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12bd870a-1af5-41d3-bef7-5667c0ca087b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.945574] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 599.945574] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5237ba07-1f79-b4ae-0689-7293839a154c" [ 599.945574] env[65758]: _type = "Task" [ 599.945574] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.947223] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 599.947223] env[65758]: value = "task-4659835" [ 599.947223] env[65758]: _type = "Task" [ 599.947223] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.955159] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 599.955159] env[65758]: value = "task-4659836" [ 599.955159] env[65758]: _type = "Task" [ 599.955159] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.968519] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5237ba07-1f79-b4ae-0689-7293839a154c, 'name': SearchDatastore_Task, 'duration_secs': 0.01137} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.968804] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659835, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.970870] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.971122] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 599.971381] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.971525] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.971698] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 599.978365] env[65758]: DEBUG nova.network.neutron [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Successfully created port: 4741e651-cd1e-4ea0-b378-213efedb59d4 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 599.980828] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6510cff1-2f41-4057-99bc-85190c3a3df7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.985102] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "2d787237-26e5-4519-9f6e-1d30b9d016cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.985341] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "2d787237-26e5-4519-9f6e-1d30b9d016cf" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.985597] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659836, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.987685] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Releasing lock "refresh_cache-f1a1650b-4c45-47fc-9c45-f4625c959597" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.987685] env[65758]: DEBUG nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Instance network_info: |[{"id": "f3319916-956f-49ba-9da5-ad0df9c5953c", "address": "fa:16:3e:16:f7:af", "network": {"id": "eb6c4afb-5353-490f-877a-96738c4fa9e3", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1051072804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5af9eac7501241d68c48c140efa1a19b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41f66e20-fd86-4158-bbdc-7a150e85e844", "external-id": "nsx-vlan-transportzone-182", "segmentation_id": 182, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3319916-95", "ovs_interfaceid": "f3319916-956f-49ba-9da5-ad0df9c5953c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 599.992034] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:f7:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41f66e20-fd86-4158-bbdc-7a150e85e844', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3319916-956f-49ba-9da5-ad0df9c5953c', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 600.001662] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Creating folder: Project (5af9eac7501241d68c48c140efa1a19b). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 600.007964] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7a97896-254a-471e-970f-b58118012563 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.016593] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659833, 'name': ReconfigVM_Task, 'duration_secs': 0.32805} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.017855] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 549673ec-3d75-4aad-a001-014f3f53a6b0/549673ec-3d75-4aad-a001-014f3f53a6b0.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.018269] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.018412] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 600.019603] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9ec1f98-095d-47df-a92f-e649a57b302d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.021588] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-524c46a4-a68c-48ed-94f8-f5a6acd8a60a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.030821] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 600.030821] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52884cab-9d60-1be5-d23c-8fd8603812c2" [ 600.030821] env[65758]: _type = "Task" [ 600.030821] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.034878] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 600.034878] env[65758]: value = "task-4659838" [ 600.034878] env[65758]: _type = "Task" [ 600.034878] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.041939] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Created folder: Project (5af9eac7501241d68c48c140efa1a19b) in parent group-v909763. [ 600.042290] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Creating folder: Instances. Parent ref: group-v909779. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 600.045556] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c049b7f-b19e-4e8f-abac-28b3e3107137 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.056701] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52884cab-9d60-1be5-d23c-8fd8603812c2, 'name': SearchDatastore_Task, 'duration_secs': 0.022217} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.062953] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659838, 'name': Rename_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.062953] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f54a67e3-73d9-4ee7-a27a-b628c037d941 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.064200] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Created folder: Instances in parent group-v909779. [ 600.064423] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 600.064622] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 600.065706] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c20cb73-5149-48d5-b4de-f0b4294c83ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.087504] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 600.087504] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525b88cb-e1eb-8d45-9b6b-282880489a0a" [ 600.087504] env[65758]: _type = "Task" [ 600.087504] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.094843] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 600.094843] env[65758]: value = "task-4659840" [ 600.094843] env[65758]: _type = "Task" [ 600.094843] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.098896] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525b88cb-e1eb-8d45-9b6b-282880489a0a, 'name': SearchDatastore_Task, 'duration_secs': 0.010405} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.102096] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.102399] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e4540963-7be9-426e-90f8-b31524d2237b/e4540963-7be9-426e-90f8-b31524d2237b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 600.105080] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f3a352e-5218-4dd2-98f3-27b7e6344b53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.113496] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659840, 'name': CreateVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.115542] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 600.115542] env[65758]: value = "task-4659841" [ 600.115542] env[65758]: _type = "Task" [ 600.115542] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.125156] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659841, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.152581] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20b0463-d20a-46c3-9153-86fbf77993af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.163330] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79091326-aebd-48ea-adc2-33124516871d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.196661] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b304941a-f09d-4509-b457-8dbb9cb5e2e5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.208035] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094827c9-49ca-43aa-ac46-e8ff2dd51955 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.221166] env[65758]: DEBUG nova.compute.provider_tree [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.312416] env[65758]: DEBUG oslo_vmware.api [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659834, 'name': PowerOnVM_Task, 'duration_secs': 0.509391} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.312775] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 600.313276] env[65758]: INFO nova.compute.manager [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Took 9.85 seconds to spawn the instance on the hypervisor. 
[ 600.313540] env[65758]: DEBUG nova.compute.manager [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 600.314513] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04632192-364c-48ee-a9b1-cfb1ca46ed44 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.331037] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Received event network-changed-bf0bd4f9-a022-486a-96ba-e2c684bfa941 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 600.331037] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Refreshing instance network info cache due to event network-changed-bf0bd4f9-a022-486a-96ba-e2c684bfa941. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 600.331037] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquiring lock "refresh_cache-67fdb417-62ea-412c-8b82-868d59149f89" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.331037] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquired lock "refresh_cache-67fdb417-62ea-412c-8b82-868d59149f89" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.331037] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Refreshing network info cache for port bf0bd4f9-a022-486a-96ba-e2c684bfa941 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 600.472029] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659835, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.472325] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659836, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.554280] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659838, 'name': Rename_Task, 'duration_secs': 0.17196} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.554583] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 600.554888] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef2fa7ad-c87a-4b13-b65b-004b9a548d2f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.564152] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 600.564152] env[65758]: value = "task-4659842" [ 600.564152] env[65758]: _type = "Task" [ 600.564152] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.574887] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659842, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.611736] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659840, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.627117] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659841, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.727504] env[65758]: DEBUG nova.scheduler.client.report [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 600.835109] env[65758]: WARNING neutronclient.v2_0.client [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 600.835818] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 600.836178] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 600.845401] env[65758]: INFO nova.compute.manager [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Took 17.99 seconds to build instance. [ 600.857840] env[65758]: DEBUG nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 600.885888] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 600.886228] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 600.886445] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 600.886703] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.886915] env[65758]: DEBUG nova.virt.hardware [None 
req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 600.887116] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 600.887867] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.887867] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 600.887867] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 600.887867] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 600.887867] env[65758]: DEBUG nova.virt.hardware [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 600.888910] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddf3fe4-a521-45ca-8e3b-34b4bada6d96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.898935] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192192a5-1b17-46bd-abd8-e9344c7bee8e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.965558] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659835, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.971592] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659836, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.746875} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.972377] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 600.972767] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f8cc38-9198-4f3e-a035-a1452ed85c53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.998831] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974/a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 600.999694] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1292570b-4f88-4253-bfd1-97355a63b8bc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.021803] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 601.021803] env[65758]: value = "task-4659843" [ 601.021803] env[65758]: _type = "Task" [ 601.021803] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.034291] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659843, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.075316] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659842, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.109928] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659840, 'name': CreateVM_Task, 'duration_secs': 0.838886} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.110138] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 601.110636] env[65758]: WARNING neutronclient.v2_0.client [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 601.111035] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.111244] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.111559] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 601.111860] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da6ef778-1f13-4582-ad3c-57f3ae7ec0a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.121136] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 601.121136] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fbc614-64e6-c4d2-7054-fb4bc1390d17" [ 601.121136] env[65758]: _type = "Task" [ 601.121136] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.128565] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659841, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527883} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.129269] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e4540963-7be9-426e-90f8-b31524d2237b/e4540963-7be9-426e-90f8-b31524d2237b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 601.130044] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 601.130044] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59c17bd7-860a-4101-b0c4-8751fa85f99b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.135701] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fbc614-64e6-c4d2-7054-fb4bc1390d17, 'name': SearchDatastore_Task, 'duration_secs': 0.012023} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.136320] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.136562] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 601.136818] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.137029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
601.137211] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 601.137470] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a1fc263-cf2c-48d1-8597-cd387b5eee80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.141170] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 601.141170] env[65758]: value = "task-4659844" [ 601.141170] env[65758]: _type = "Task" [ 601.141170] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.146696] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 601.146927] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 601.151246] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a51b5310-2a5c-46c7-b0bc-17de3e2404c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.153778] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659844, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.158026] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 601.158026] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ef4bc6-280f-6ff4-1693-ce9937ac793d" [ 601.158026] env[65758]: _type = "Task" [ 601.158026] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.167539] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ef4bc6-280f-6ff4-1693-ce9937ac793d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.233799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.234317] env[65758]: DEBUG nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 601.237552] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.826s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.348019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5847413a-f28d-43c2-bb3e-a7be113f7cfc tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "9e16d31b-e84c-448b-9d83-98cac49570a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.502s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.462943] env[65758]: DEBUG oslo_vmware.api [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4659835, 'name': PowerOnVM_Task, 'duration_secs': 1.176711} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.463234] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 601.463439] env[65758]: INFO nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Took 15.48 seconds to spawn the instance on the hypervisor. 
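The repeated "progress is N%" entries above, followed by "completed successfully" with a duration_secs figure, reflect a poll-until-done loop around vCenter task objects. A minimal sketch of that polling pattern, assuming a get_task_info() callable that returns an object with state, progress and error attributes (illustrative names, not the oslo.vmware API):

    import time

    RUNNING, SUCCESS, ERROR = "running", "success", "error"  # simplified task states

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        """Poll a task until it finishes, mirroring the progress/duration log lines."""
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info.state == SUCCESS:
                # corresponds to "... completed successfully" with a duration_secs value
                return time.monotonic() - start
            if info.state == ERROR:
                raise RuntimeError(f"task failed: {info.error}")
            # corresponds to the intermediate "progress is N%" entries
            print(f"progress is {info.progress}%")
            if time.monotonic() - start > timeout:
                raise TimeoutError("task did not complete within the timeout")
            time.sleep(interval)
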
[ 601.463609] env[65758]: DEBUG nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 601.464441] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a74a24-9722-4dff-997f-094f79927c77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.538448] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659843, 'name': ReconfigVM_Task, 'duration_secs': 0.307766} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.539143] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Reconfigured VM instance instance-00000004 to attach disk [datastore2] a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974/a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 601.539923] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-416807ff-feab-431b-a23e-d7135e05db88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.549742] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 601.549742] env[65758]: value = "task-4659845" [ 601.549742] env[65758]: _type = "Task" [ 601.549742] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.560391] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659845, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.576300] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659842, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.616781] env[65758]: DEBUG nova.network.neutron [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Successfully updated port: 4741e651-cd1e-4ea0-b378-213efedb59d4 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 601.654197] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659844, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068497} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.654557] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 601.655475] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48684f93-3db4-4e52-bc4e-67641a0cab5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.682215] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] e4540963-7be9-426e-90f8-b31524d2237b/e4540963-7be9-426e-90f8-b31524d2237b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 601.683430] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd272cab-dbbd-48d1-8852-6807d74c03bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.706679] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ef4bc6-280f-6ff4-1693-ce9937ac793d, 'name': SearchDatastore_Task, 'duration_secs': 0.01059} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.708227] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09d41b6a-9332-42ee-ac2c-8c262c8e7279 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.719240] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 601.719240] env[65758]: value = "task-4659846" [ 601.719240] env[65758]: _type = "Task" [ 601.719240] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.726096] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 601.726096] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528225c6-e9c4-922f-73bc-3eaf3ab087c9" [ 601.726096] env[65758]: _type = "Task" [ 601.726096] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.733505] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659846, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.748455] env[65758]: DEBUG nova.compute.utils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 601.748455] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528225c6-e9c4-922f-73bc-3eaf3ab087c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.750521] env[65758]: DEBUG nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 601.750763] env[65758]: DEBUG nova.network.neutron [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 601.751058] env[65758]: WARNING neutronclient.v2_0.client [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 601.751313] env[65758]: WARNING neutronclient.v2_0.client [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
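The paired "Lock 'compute_resources' acquired ... waited Ns" / "released ... held Ns" entries are emitted by oslo.concurrency's lock decorator, which serializes sections such as the resource tracker's instance_claim. A small usage sketch (the function body is a placeholder, not Nova's actual claim logic):

    from oslo_concurrency import lockutils

    # Callers entering any function guarded by the same lock name are serialized;
    # oslo.concurrency logs how long each caller waited for and held the lock.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, resources):
        # Placeholder for the work done while the lock is held
        # (claiming CPU/RAM/disk for the instance on this compute node).
        return {'instance': instance_uuid, 'claimed': resources}

    if __name__ == '__main__':
        print(instance_claim('adc1b956-1b5a-4272-b0ff-95a565e9c45c',
                             {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}))
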
[ 601.751884] env[65758]: WARNING openstack [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 601.753687] env[65758]: WARNING openstack [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 601.815021] env[65758]: DEBUG nova.policy [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3816e419a1704f46896c4942d0dad734', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '06aa7ad9cf4f4f528687bbd3e6d12b0d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 601.851474] env[65758]: DEBUG nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 601.930476] env[65758]: WARNING neutronclient.v2_0.client [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 601.933554] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 601.933554] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 601.991797] env[65758]: INFO nova.compute.manager [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Took 20.38 seconds to build instance. 
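The recurring "Disabling service 'block-storage' / 'key-manager'" warnings come from the SDK's config processing asking for an option that was never registered under the [cinder] and [barbican] groups, which oslo.config reports as NoSuchOptError. A minimal reproduction of that error class (illustrative options only, not Nova's real option setup):

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    # Register a [cinder] group, but deliberately leave out 'valid_interfaces'.
    conf.register_opts([cfg.StrOpt('region_name')], group='cinder')
    conf([], project='demo')

    try:
        conf.cinder.valid_interfaces  # never registered in this group
    except cfg.NoSuchOptError as exc:
        # Prints something like: no such option valid_interfaces in group [cinder]
        print(exc)
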
[ 602.068131] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659845, 'name': Rename_Task, 'duration_secs': 0.204095} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.073465] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 602.074018] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1223232b-adff-4741-bdbf-c404951ff3f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.085496] env[65758]: DEBUG oslo_vmware.api [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659842, 'name': PowerOnVM_Task, 'duration_secs': 1.093629} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.087860] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 602.087860] env[65758]: INFO nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Took 13.86 seconds to spawn the instance on the hypervisor. [ 602.088095] env[65758]: DEBUG nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 602.088509] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 602.088509] env[65758]: value = "task-4659847" [ 602.088509] env[65758]: _type = "Task" [ 602.088509] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.089591] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c54c69d-9137-4639-a729-b0b26313c38e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.106094] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659847, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.120941] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.120941] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.121255] env[65758]: DEBUG nova.network.neutron [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 602.236599] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659846, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.243533] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528225c6-e9c4-922f-73bc-3eaf3ab087c9, 'name': SearchDatastore_Task, 'duration_secs': 0.017185} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.246651] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.246651] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] f1a1650b-4c45-47fc-9c45-f4625c959597/f1a1650b-4c45-47fc-9c45-f4625c959597.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 602.246651] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06c8b56d-77c4-4d1d-9436-93ffe54613a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.255124] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 602.255124] env[65758]: value = "task-4659848" [ 602.255124] env[65758]: _type = "Task" [ 602.255124] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.259957] env[65758]: DEBUG nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 602.267929] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659848, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.281882] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 67fdb417-62ea-412c-8b82-868d59149f89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 602.282044] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 549673ec-3d75-4aad-a001-014f3f53a6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 602.282213] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 9e16d31b-e84c-448b-9d83-98cac49570a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 602.282287] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 602.282337] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e4540963-7be9-426e-90f8-b31524d2237b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 602.282675] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance f1a1650b-4c45-47fc-9c45-f4625c959597 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 602.282675] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 83fa942b-a195-4bcb-9ed5-5bb6764220a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 602.282921] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance adc1b956-1b5a-4272-b0ff-95a565e9c45c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 602.298388] env[65758]: DEBUG nova.network.neutron [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Successfully created port: fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 602.378936] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.414929] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Updated VIF entry in instance network info cache for port bf0bd4f9-a022-486a-96ba-e2c684bfa941. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 602.415473] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Updating instance_info_cache with network_info: [{"id": "bf0bd4f9-a022-486a-96ba-e2c684bfa941", "address": "fa:16:3e:a3:16:62", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf0bd4f9-a0", "ovs_interfaceid": "bf0bd4f9-a022-486a-96ba-e2c684bfa941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 602.475666] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquiring lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.475820] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.495234] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3eb0ff42-0656-4cb4-b965-9897d071910a tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "67fdb417-62ea-412c-8b82-868d59149f89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.895s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.606342] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659847, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.624887] env[65758]: INFO nova.compute.manager [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Took 20.27 seconds to build instance. [ 602.629385] env[65758]: WARNING openstack [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 602.629385] env[65758]: WARNING openstack [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 602.733552] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659846, 'name': ReconfigVM_Task, 'duration_secs': 0.63508} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.733552] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Reconfigured VM instance instance-00000005 to attach disk [datastore2] e4540963-7be9-426e-90f8-b31524d2237b/e4540963-7be9-426e-90f8-b31524d2237b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 602.733552] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d48b6069-c88a-49d2-a619-31535d7810aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.743311] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 602.743311] env[65758]: value = "task-4659849" [ 602.743311] env[65758]: _type = "Task" [ 602.743311] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.753992] env[65758]: DEBUG nova.network.neutron [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 602.766814] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659849, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.775959] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659848, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.788847] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 602.918670] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Releasing lock "refresh_cache-67fdb417-62ea-412c-8b82-868d59149f89" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.919029] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Received event network-vif-plugged-3d50d517-3f1a-4b04-a81d-54672953d4c6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 602.919303] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquiring lock "549673ec-3d75-4aad-a001-014f3f53a6b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.919599] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Lock "549673ec-3d75-4aad-a001-014f3f53a6b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.919894] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Lock "549673ec-3d75-4aad-a001-014f3f53a6b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.920089] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] No waiting events found dispatching network-vif-plugged-3d50d517-3f1a-4b04-a81d-54672953d4c6 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 602.920190] env[65758]: WARNING nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Received unexpected event network-vif-plugged-3d50d517-3f1a-4b04-a81d-54672953d4c6 for instance with vm_state building and task_state spawning. [ 602.920419] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Received event network-changed-3d50d517-3f1a-4b04-a81d-54672953d4c6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 602.920580] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Refreshing instance network info cache due to event network-changed-3d50d517-3f1a-4b04-a81d-54672953d4c6. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 602.920826] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquiring lock "refresh_cache-549673ec-3d75-4aad-a001-014f3f53a6b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.920956] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquired lock "refresh_cache-549673ec-3d75-4aad-a001-014f3f53a6b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.921790] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Refreshing network info cache for port 3d50d517-3f1a-4b04-a81d-54672953d4c6 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 602.999030] env[65758]: DEBUG nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 603.105940] env[65758]: DEBUG oslo_vmware.api [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659847, 'name': PowerOnVM_Task, 'duration_secs': 0.813419} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.106672] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.106672] env[65758]: INFO nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Took 10.42 seconds to spawn the instance on the hypervisor. 
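The "No waiting events found dispatching network-vif-plugged-..." warning above reflects the expected-event handshake: the thread spawning an instance registers the Neutron events it expects, and the handler for externally reported events either wakes that waiter or flags the event as unexpected. A simplified, self-contained sketch of that handshake (not the actual nova.compute.manager implementation):

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Expected-event registry keyed by (instance_uuid, event_name)."""

        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._waiters = defaultdict(dict)

        def prepare(self, instance_uuid, event_name):
            """Called by the spawning thread before it starts waiting."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop_event(self, instance_uuid, event_name):
            """Called when Neutron reports an event via the external event API."""
            with self._lock:
                waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
            if waiter is None:
                # corresponds to "Received unexpected event ... for instance ..."
                print(f"unexpected event {event_name} for {instance_uuid}")
            else:
                waiter.set()  # wakes whoever called prepare() and is waiting

    events = InstanceEvents()
    # No waiter was registered for this port, so the event is reported as unexpected,
    # as in the WARNING above for instance 549673ec-3d75-4aad-a001-014f3f53a6b0.
    events.pop_event('549673ec-3d75-4aad-a001-014f3f53a6b0',
                     'network-vif-plugged-3d50d517-3f1a-4b04-a81d-54672953d4c6')
    # With a waiter registered first, the same call wakes the spawning thread instead.
    w = events.prepare('83fa942b-a195-4bcb-9ed5-5bb6764220a4',
                       'network-vif-plugged-4741e651-cd1e-4ea0-b378-213efedb59d4')
    events.pop_event('83fa942b-a195-4bcb-9ed5-5bb6764220a4',
                     'network-vif-plugged-4741e651-cd1e-4ea0-b378-213efedb59d4')
    print(w.wait(timeout=1))  # True: the expected event arrived
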
[ 603.106833] env[65758]: DEBUG nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 603.108327] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60000fdb-d5fd-4733-b9c3-46166d6917c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.127337] env[65758]: DEBUG oslo_concurrency.lockutils [None req-254b27c5-06d9-4d78-b31b-637b04283c96 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "549673ec-3d75-4aad-a001-014f3f53a6b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.777s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.156300] env[65758]: WARNING neutronclient.v2_0.client [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 603.156300] env[65758]: WARNING openstack [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 603.156562] env[65758]: WARNING openstack [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 603.253696] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659849, 'name': Rename_Task, 'duration_secs': 0.220702} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.254078] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 603.254403] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9f45546-328d-480d-8666-17fc975c98f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.269287] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659848, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675103} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.270926] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] f1a1650b-4c45-47fc-9c45-f4625c959597/f1a1650b-4c45-47fc-9c45-f4625c959597.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 603.271213] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 603.271600] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 603.271600] env[65758]: value = "task-4659850" [ 603.271600] env[65758]: _type = "Task" [ 603.271600] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.271812] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b3fa639-2409-42cd-ac75-12871e04e217 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.279893] env[65758]: DEBUG nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 603.285430] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 603.285430] env[65758]: value = "task-4659851" [ 603.285430] env[65758]: _type = "Task" [ 603.285430] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.295550] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 37aadd44-79e8-4479-862f-265549c9d802 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 603.310341] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659851, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.312794] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 603.313293] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 603.313293] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 603.313415] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 603.313560] env[65758]: DEBUG nova.virt.hardware [None 
req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 603.313710] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 603.313911] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.314075] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 603.314249] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 603.314409] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 603.314587] env[65758]: DEBUG nova.virt.hardware [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 603.315919] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462c635b-cf5e-4fa3-bd55-2cd0cceeeb50 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.327463] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5195347-c79c-4323-a296-001f6f33f4a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.425987] env[65758]: WARNING neutronclient.v2_0.client [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 603.426983] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 603.427786] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 603.530533] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.630637] env[65758]: INFO nova.compute.manager [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Took 19.40 seconds to build instance. [ 603.631417] env[65758]: DEBUG nova.compute.manager [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 603.764319] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.764596] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.788882] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659850, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.805461] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e48a075b-41b3-4612-bd5f-0a158d707a2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 603.805533] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659851, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.226486} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.808689] env[65758]: DEBUG nova.network.neutron [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance_info_cache with network_info: [{"id": "4741e651-cd1e-4ea0-b378-213efedb59d4", "address": "fa:16:3e:9f:a7:58", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4741e651-cd", "ovs_interfaceid": "4741e651-cd1e-4ea0-b378-213efedb59d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 603.810861] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 603.811475] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d63bf3-36b9-41f5-bab1-a050f5390cbb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.840330] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] f1a1650b-4c45-47fc-9c45-f4625c959597/f1a1650b-4c45-47fc-9c45-f4625c959597.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 603.844418] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57c17d30-8337-46c2-b902-756e0d38d034 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.868558] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 603.868558] env[65758]: value = "task-4659852" [ 603.868558] env[65758]: _type = "Task" [ 603.868558] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.882472] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659852, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.086971] env[65758]: DEBUG nova.network.neutron [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Successfully updated port: fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 604.125977] env[65758]: WARNING neutronclient.v2_0.client [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 604.125977] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 604.125977] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 604.136476] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5bca26e-6fdc-45b2-8887-dc6440ac06e7 tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.922s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.178126] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.219484] env[65758]: DEBUG nova.compute.manager [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] [instance: 
e4540963-7be9-426e-90f8-b31524d2237b] Received event network-changed-83c16429-d108-4a97-84ec-81e4398f9881 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 604.219936] env[65758]: DEBUG nova.compute.manager [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Refreshing instance network info cache due to event network-changed-83c16429-d108-4a97-84ec-81e4398f9881. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 604.219936] env[65758]: DEBUG oslo_concurrency.lockutils [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] Acquiring lock "refresh_cache-e4540963-7be9-426e-90f8-b31524d2237b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.220171] env[65758]: DEBUG oslo_concurrency.lockutils [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] Acquired lock "refresh_cache-e4540963-7be9-426e-90f8-b31524d2237b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.220260] env[65758]: DEBUG nova.network.neutron [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Refreshing network info cache for port 83c16429-d108-4a97-84ec-81e4398f9881 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 604.244394] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Updated VIF entry in instance network info cache for port 3d50d517-3f1a-4b04-a81d-54672953d4c6. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 604.244394] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Updating instance_info_cache with network_info: [{"id": "3d50d517-3f1a-4b04-a81d-54672953d4c6", "address": "fa:16:3e:6b:10:71", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d50d517-3f", "ovs_interfaceid": "3d50d517-3f1a-4b04-a81d-54672953d4c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 604.291691] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659850, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.312967] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 2d787237-26e5-4519-9f6e-1d30b9d016cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 604.315997] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.317342] env[65758]: DEBUG nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Instance network_info: |[{"id": "4741e651-cd1e-4ea0-b378-213efedb59d4", "address": "fa:16:3e:9f:a7:58", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4741e651-cd", "ovs_interfaceid": "4741e651-cd1e-4ea0-b378-213efedb59d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 604.317595] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:a7:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4741e651-cd1e-4ea0-b378-213efedb59d4', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.332936] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Creating folder: Project (cdaabf2897064b5a948dbdb6d5921d76). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.333668] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff7fc587-4e15-4f7c-92fa-a7323fc45909 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.352044] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Created folder: Project (cdaabf2897064b5a948dbdb6d5921d76) in parent group-v909763. 
[ 604.352249] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Creating folder: Instances. Parent ref: group-v909782. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.352556] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e553d08-3fae-4643-ba2f-1b31ad2c2a29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.368887] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Created folder: Instances in parent group-v909782. [ 604.369263] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 604.369596] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 604.373829] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e5793fb-3390-4211-818a-8d028db81cf0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.403133] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659852, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.404833] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.404833] env[65758]: value = "task-4659855" [ 604.404833] env[65758]: _type = "Task" [ 604.404833] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.415303] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659855, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.431763] env[65758]: DEBUG nova.compute.manager [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Received event network-vif-plugged-4741e651-cd1e-4ea0-b378-213efedb59d4 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 604.432029] env[65758]: DEBUG oslo_concurrency.lockutils [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Acquiring lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.432356] env[65758]: DEBUG oslo_concurrency.lockutils [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.432550] env[65758]: DEBUG oslo_concurrency.lockutils [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.432718] env[65758]: DEBUG nova.compute.manager [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] No waiting events found dispatching network-vif-plugged-4741e651-cd1e-4ea0-b378-213efedb59d4 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 604.432974] env[65758]: WARNING nova.compute.manager [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Received unexpected event network-vif-plugged-4741e651-cd1e-4ea0-b378-213efedb59d4 for instance with vm_state building and task_state spawning. [ 604.433183] env[65758]: DEBUG nova.compute.manager [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Received event network-changed-4741e651-cd1e-4ea0-b378-213efedb59d4 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 604.434054] env[65758]: DEBUG nova.compute.manager [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Refreshing instance network info cache due to event network-changed-4741e651-cd1e-4ea0-b378-213efedb59d4. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 604.434054] env[65758]: DEBUG oslo_concurrency.lockutils [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Acquiring lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.434054] env[65758]: DEBUG oslo_concurrency.lockutils [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Acquired lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.434421] env[65758]: DEBUG nova.network.neutron [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Refreshing network info cache for port 4741e651-cd1e-4ea0-b378-213efedb59d4 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 604.592229] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.593121] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquired lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.593121] env[65758]: DEBUG nova.network.neutron [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 604.642858] env[65758]: DEBUG nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 604.725850] env[65758]: WARNING neutronclient.v2_0.client [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 604.726503] env[65758]: WARNING openstack [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 604.726796] env[65758]: WARNING openstack [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 604.748763] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Releasing lock "refresh_cache-549673ec-3d75-4aad-a001-014f3f53a6b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.748763] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Received event network-vif-plugged-af073ff0-f4c1-43b4-bf05-beb5e71db8ac {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 604.748763] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquiring lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.748763] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.748763] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.748949] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] No waiting events found dispatching network-vif-plugged-af073ff0-f4c1-43b4-bf05-beb5e71db8ac {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 604.748949] env[65758]: WARNING nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Received unexpected event network-vif-plugged-af073ff0-f4c1-43b4-bf05-beb5e71db8ac for instance with vm_state building and task_state spawning. 
[ 604.749120] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Received event network-changed-af073ff0-f4c1-43b4-bf05-beb5e71db8ac {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 604.749321] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Refreshing instance network info cache due to event network-changed-af073ff0-f4c1-43b4-bf05-beb5e71db8ac. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 604.749631] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquiring lock "refresh_cache-a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.749785] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquired lock "refresh_cache-a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.749955] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Refreshing network info cache for port af073ff0-f4c1-43b4-bf05-beb5e71db8ac {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 604.793632] env[65758]: DEBUG oslo_vmware.api [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659850, 'name': PowerOnVM_Task, 'duration_secs': 1.110272} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.796337] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 604.797466] env[65758]: INFO nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Took 9.72 seconds to spawn the instance on the hypervisor. 
[ 604.797466] env[65758]: DEBUG nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 604.798574] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df44ef4f-5fc6-476b-99cd-a73796311f73 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.819328] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e60efbcd-1c4e-40a1-8bc1-893daa511073 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 604.820923] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 604.820923] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=100GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '8', 'num_vm_building': '7', 'num_task_spawning': '6', 'num_os_type_None': '8', 'num_proj_aab1df827abb49b88b951d30ba485d39': '1', 'io_workload': '7', 'num_proj_d999e5f3384e4a24ad9ec68b2fa3fda7': '1', 'num_vm_active': '1', 'num_task_None': '2', 'num_proj_b0a2aad73cc5476c9543e385e04d1d36': '1', 'num_proj_3fd9b52de8d045e2b42b8646ea659584': '1', 'num_proj_57c8e01c5bb14062ae9c179e013a1c1e': '1', 'num_proj_5af9eac7501241d68c48c140efa1a19b': '1', 'num_proj_cdaabf2897064b5a948dbdb6d5921d76': '1', 'num_proj_06aa7ad9cf4f4f528687bbd3e6d12b0d': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 604.889824] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659852, 'name': ReconfigVM_Task, 'duration_secs': 0.724187} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.895583] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Reconfigured VM instance instance-00000006 to attach disk [datastore2] f1a1650b-4c45-47fc-9c45-f4625c959597/f1a1650b-4c45-47fc-9c45-f4625c959597.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 604.895583] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0abf227-2932-452d-b3d5-686f75ea967d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.920966] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 604.920966] env[65758]: value = "task-4659856" [ 604.920966] env[65758]: _type = "Task" [ 604.920966] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.933882] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659855, 'name': CreateVM_Task, 'duration_secs': 0.467499} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.935026] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 604.935654] env[65758]: WARNING neutronclient.v2_0.client [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 604.937037] env[65758]: DEBUG oslo_vmware.service [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ab05cd-8737-4536-a9cd-00b0419a15b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.956268] env[65758]: WARNING neutronclient.v2_0.client [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 604.957305] env[65758]: WARNING openstack [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 604.958058] env[65758]: WARNING openstack [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 604.970079] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659856, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.971899] env[65758]: DEBUG nova.compute.manager [None req-9489c688-bd96-423a-85a9-21a6b60cf1e2 tempest-ServerDiagnosticsV248Test-1955092603 tempest-ServerDiagnosticsV248Test-1955092603-project-admin] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 604.975256] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a75829-5bbd-4a21-a5f1-d81a7cfe3250 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.982482] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.982805] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.983301] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 604.984128] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9c5e70a-b3b8-4cf6-bc02-761ad3b8a55f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.995887] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 604.995887] 
env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528d95e2-633a-e538-4bad-ecbd221b39ea" [ 604.995887] env[65758]: _type = "Task" [ 604.995887] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.998768] env[65758]: INFO nova.compute.manager [None req-9489c688-bd96-423a-85a9-21a6b60cf1e2 tempest-ServerDiagnosticsV248Test-1955092603 tempest-ServerDiagnosticsV248Test-1955092603-project-admin] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Retrieving diagnostics [ 605.005247] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928202e4-a34f-4a1e-8eb8-220ae6e60b44 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.065021] env[65758]: WARNING neutronclient.v2_0.client [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 605.065021] env[65758]: WARNING openstack [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 605.065021] env[65758]: WARNING openstack [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 605.073921] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.074278] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.074418] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.074542] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.074708] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.077976] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-744b322c-45c7-4e9f-bc95-fda3dcaf0987 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.091913] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.092121] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 605.093659] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f917ad4-6f5a-47cb-9483-1e1a2f6d5c77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.099730] env[65758]: WARNING openstack [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 605.100486] env[65758]: WARNING openstack [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 605.114208] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f36abe-372b-4b5f-ab3f-b5b5b6cd6e61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.121215] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 605.121215] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52aec2b6-53a4-461a-358b-9c8cf0f4c34c" [ 605.121215] env[65758]: _type = "Task" [ 605.121215] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.131729] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Acquiring lock "67fdb417-62ea-412c-8b82-868d59149f89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.132220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Lock "67fdb417-62ea-412c-8b82-868d59149f89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.132220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Acquiring lock "67fdb417-62ea-412c-8b82-868d59149f89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.132531] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Lock "67fdb417-62ea-412c-8b82-868d59149f89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.132531] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Lock "67fdb417-62ea-412c-8b82-868d59149f89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.135263] env[65758]: INFO nova.compute.manager [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Terminating instance [ 605.142484] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Preparing fetch location {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 605.142769] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Creating directory with path [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.143037] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-b3647295-d455-49b3-947a-374fb236a804 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.172122] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.185738] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Created directory with path [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.186025] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Fetch image to [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 605.186121] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Downloading image file data 75a6399b-5100-4c51-b5cf-162bd505a28f to [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk on the data store datastore1 {{(pid=65758) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 605.187358] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a91a81-4f22-46e2-b51e-ee8f8d77b328 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.200494] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ca3b3c-4192-47b6-9ad1-d7246267cb11 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.209791] env[65758]: DEBUG nova.network.neutron [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 605.219895] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de60d3b-bea2-4756-b2ed-3e70b3b3eaa3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.228410] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f218229-ac30-40a2-85ac-c7d01c257e26 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.233671] env[65758]: DEBUG nova.network.neutron [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Updated VIF entry in instance network info cache for port 83c16429-d108-4a97-84ec-81e4398f9881. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 605.234466] env[65758]: DEBUG nova.network.neutron [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Updating instance_info_cache with network_info: [{"id": "83c16429-d108-4a97-84ec-81e4398f9881", "address": "fa:16:3e:f5:f8:b7", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c16429-d1", "ovs_interfaceid": "83c16429-d108-4a97-84ec-81e4398f9881", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 605.268039] env[65758]: WARNING neutronclient.v2_0.client [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
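The cache-update entries above carry the full network_info structure Nova stores per instance (VIF id, MAC, subnets, fixed IPs, OVS details). Below is a minimal sketch of reading that structure as plain dicts, with values copied from the entry above; it is illustration only — Nova itself wraps this data in nova.network.model classes rather than handling raw dicts like this.

```python
# Sketch: extracting per-VIF addressing from a network_info entry like the one
# logged above. Plain dict handling for illustration; not Nova's model classes.
network_info = [{
    "id": "83c16429-d108-4a97-84ec-81e4398f9881",
    "address": "fa:16:3e:f5:f8:b7",
    "devname": "tap83c16429-d1",
    "network": {
        "id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "gateway": {"address": "192.168.233.1"},
            "ips": [{"address": "192.168.233.149", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]
    print(vif["devname"], vif["address"], fixed_ips)
    # -> tap83c16429-d1 fa:16:3e:f5:f8:b7 ['192.168.233.149']
```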
[ 605.268691] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 605.269091] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 605.283573] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6427a0-f2b0-4985-bf01-6024b02279c8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.290926] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba01f3f4-da02-46e1-9dbb-199035239df7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.327488] env[65758]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d64f3f51-bc77-498a-8874-0209616b5e68 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.341108] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c562044a-062d-47cc-bdab-92d370208dc1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.344711] env[65758]: INFO nova.compute.manager [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Took 21.11 seconds to build instance. [ 605.356382] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81be60a2-c030-4929-8f09-8a707dc791f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.373321] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.379027] env[65758]: WARNING neutronclient.v2_0.client [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
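The repeated "Disabling service ... no such option valid_interfaces in group [cinder]" warnings above come from openstacksdk reading per-service config groups for which no adapter options were registered. A minimal oslo.config sketch that reproduces the same NoSuchOptError follows; it assumes nothing about Nova's actual option registration and only demonstrates the error class seen in the log.

```python
from oslo_config import cfg

# Reproduce the NoSuchOptError from the warnings above: the [cinder] group
# exists, but no 'valid_interfaces' option was ever registered in it.
conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup("cinder"))
conf(args=[])  # parse nothing; defaults only

try:
    conf.cinder.valid_interfaces  # unregistered option in this group
except cfg.NoSuchOptError as exc:
    print(exc)  # e.g. "no such option valid_interfaces in group [cinder]"
```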
[ 605.379027] env[65758]: WARNING openstack [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 605.379110] env[65758]: WARNING openstack [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 605.392401] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Downloading image file data 75a6399b-5100-4c51-b5cf-162bd505a28f to the data store datastore1 {{(pid=65758) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 605.418875] env[65758]: WARNING neutronclient.v2_0.client [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 605.421025] env[65758]: WARNING openstack [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 605.421212] env[65758]: WARNING openstack [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 605.448665] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659856, 'name': Rename_Task, 'duration_secs': 0.394246} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.448824] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 605.449085] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c76db06a-b815-41b9-855a-c12020a413de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.457887] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 605.457887] env[65758]: value = "task-4659857" [ 605.457887] env[65758]: _type = "Task" [ 605.457887] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.477378] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659857, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.488791] env[65758]: DEBUG oslo_vmware.rw_handles [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=65758) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 605.572280] env[65758]: DEBUG nova.network.neutron [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updated VIF entry in instance network info cache for port 4741e651-cd1e-4ea0-b378-213efedb59d4. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 605.573208] env[65758]: DEBUG nova.network.neutron [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance_info_cache with network_info: [{"id": "4741e651-cd1e-4ea0-b378-213efedb59d4", "address": "fa:16:3e:9f:a7:58", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4741e651-cd", "ovs_interfaceid": "4741e651-cd1e-4ea0-b378-213efedb59d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 605.636819] env[65758]: DEBUG nova.network.neutron [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updating instance_info_cache with network_info: [{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 605.643244] env[65758]: DEBUG nova.compute.manager [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 605.643677] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.644754] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f6fc62-e0fd-441b-ab2e-9d7a772f79a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.657300] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 605.662489] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a1e71c2-7d30-4e95-9eb5-5933f7d5910c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.673422] env[65758]: DEBUG oslo_vmware.api [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Waiting for the task: (returnval){ [ 605.673422] env[65758]: value = "task-4659858" [ 605.673422] env[65758]: _type = "Task" [ 605.673422] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.692278] env[65758]: DEBUG oslo_vmware.api [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Task: {'id': task-4659858, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.726305] env[65758]: WARNING neutronclient.v2_0.client [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
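The PowerOffVM_Task entries above show the recurring pattern in this log: invoke a vSphere task, then poll it until it reports success, logging progress along the way. Below is a minimal, self-contained sketch of such a poll loop; get_task_info is a hypothetical stand-in for the real property query, and oslo.vmware's wait_for_task drives an equivalent loop via a looping call rather than this literal code.

```python
import time

def wait_for_task(get_task_info, interval=0.5):
    """Poll a task until it finishes; raise if it errors (sketch only)."""
    while True:
        info = get_task_info()  # e.g. {'state': 'running', 'progress': 40}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(interval)

# Example: a fake task that completes on the third poll.
states = iter([{"state": "running", "progress": 0},
               {"state": "running", "progress": 89},
               {"state": "success", "result": "task-4659858"}])
print(wait_for_task(lambda: next(states), interval=0))
```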
[ 605.726738] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 605.727264] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 605.739415] env[65758]: DEBUG oslo_concurrency.lockutils [req-531b5fd4-3597-45f4-b525-42b3e5cfa5a7 req-3e4c06be-4219-4670-a61f-75bdc23d654f service nova] Releasing lock "refresh_cache-e4540963-7be9-426e-90f8-b31524d2237b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.842832] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Updated VIF entry in instance network info cache for port af073ff0-f4c1-43b4-bf05-beb5e71db8ac. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 605.843420] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Updating instance_info_cache with network_info: [{"id": "af073ff0-f4c1-43b4-bf05-beb5e71db8ac", "address": "fa:16:3e:6e:d0:9d", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf073ff0-f4", "ovs_interfaceid": "af073ff0-f4c1-43b4-bf05-beb5e71db8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 605.850344] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cb0564ad-f42a-466a-8809-4bc9855cb460 tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "e4540963-7be9-426e-90f8-b31524d2237b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.634s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.876918] env[65758]: DEBUG 
nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 605.972040] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "0ac196fa-d88c-45a8-999e-8b5216912041" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.972040] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "0ac196fa-d88c-45a8-999e-8b5216912041" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.986228] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659857, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.076808] env[65758]: DEBUG oslo_concurrency.lockutils [req-9035a43d-bf47-48bc-a17a-97297a42369c req-562a8138-dabe-4eaf-921e-439e338e22ad service nova] Releasing lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.142904] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Releasing lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.143422] env[65758]: DEBUG nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Instance network_info: |[{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 606.143973] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:f5:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb1e683c-095a-4512-a0a0-ec651a275ab8', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.152358] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Creating folder: Project (06aa7ad9cf4f4f528687bbd3e6d12b0d). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.155889] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5be99b78-c464-4eb9-9da2-7018a7cbefd6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.168719] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Created folder: Project (06aa7ad9cf4f4f528687bbd3e6d12b0d) in parent group-v909763. [ 606.168719] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Creating folder: Instances. Parent ref: group-v909785. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.169383] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bdea2e45-d85a-4abe-a167-16e2e9c4c15c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.196587] env[65758]: DEBUG oslo_vmware.api [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Task: {'id': task-4659858, 'name': PowerOffVM_Task, 'duration_secs': 0.214996} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.198624] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 606.198725] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 606.198962] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Created folder: Instances in parent group-v909785. [ 606.199194] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 606.199769] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca40d47e-6422-44d5-972f-928e08abf9aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.201154] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 606.201395] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a087205-685f-4096-ae9a-61dc0b247ff3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.226801] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.226801] env[65758]: value = "task-4659862" [ 606.226801] env[65758]: _type = "Task" [ 606.226801] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.237264] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659862, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.298215] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 606.298402] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 606.298609] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Deleting the datastore file [datastore2] 67fdb417-62ea-412c-8b82-868d59149f89 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 606.298946] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cbc9953f-279b-4835-a527-0d34ab9ac549 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.308783] env[65758]: DEBUG oslo_vmware.api [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Waiting for the task: (returnval){ [ 606.308783] env[65758]: value = "task-4659863" [ 606.308783] env[65758]: _type = "Task" [ 606.308783] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.317854] env[65758]: DEBUG oslo_vmware.api [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Task: {'id': task-4659863, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.347978] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Releasing lock "refresh_cache-a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.349153] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Received event network-vif-plugged-f3319916-956f-49ba-9da5-ad0df9c5953c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 606.349508] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquiring lock "f1a1650b-4c45-47fc-9c45-f4625c959597-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.349760] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Lock "f1a1650b-4c45-47fc-9c45-f4625c959597-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.350178] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Lock "f1a1650b-4c45-47fc-9c45-f4625c959597-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.350178] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] No waiting events found dispatching network-vif-plugged-f3319916-956f-49ba-9da5-ad0df9c5953c {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 606.350345] env[65758]: WARNING nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Received unexpected event network-vif-plugged-f3319916-956f-49ba-9da5-ad0df9c5953c for instance with vm_state building and task_state spawning. [ 606.350511] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Received event network-changed-f3319916-956f-49ba-9da5-ad0df9c5953c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 606.350666] env[65758]: DEBUG nova.compute.manager [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Refreshing instance network info cache due to event network-changed-f3319916-956f-49ba-9da5-ad0df9c5953c. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 606.350886] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquiring lock "refresh_cache-f1a1650b-4c45-47fc-9c45-f4625c959597" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.351032] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Acquired lock "refresh_cache-f1a1650b-4c45-47fc-9c45-f4625c959597" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.351163] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Refreshing network info cache for port f3319916-956f-49ba-9da5-ad0df9c5953c {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 606.384474] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 606.384787] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.147s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.385152] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.189s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.386956] env[65758]: INFO nova.compute.claims [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 606.473514] env[65758]: DEBUG oslo_vmware.api [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659857, 'name': PowerOnVM_Task, 'duration_secs': 0.569376} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.473753] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 606.474199] env[65758]: INFO nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Took 9.11 seconds to spawn the instance on the hypervisor. [ 606.474199] env[65758]: DEBUG nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 606.475329] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a306f42-7afe-4c10-bd4b-74e949aa8b90 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.479971] env[65758]: DEBUG nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 606.618774] env[65758]: DEBUG oslo_vmware.rw_handles [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Completed reading data from the image iterator. {{(pid=65758) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 606.618924] env[65758]: DEBUG oslo_vmware.rw_handles [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 606.701627] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Downloaded image file data 75a6399b-5100-4c51-b5cf-162bd505a28f to vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk on the data store datastore1 {{(pid=65758) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 606.704158] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Caching image {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 606.704754] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Copying Virtual Disk [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk to [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 606.705880] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab34bb5f-1bfd-49d5-bc4d-a862b15e05de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.717938] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 606.717938] env[65758]: value = "task-4659864" [ 606.717938] env[65758]: _type = "Task" [ 606.717938] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.729750] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659864, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.739860] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659862, 'name': CreateVM_Task, 'duration_secs': 0.483311} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.740131] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 606.740737] env[65758]: WARNING neutronclient.v2_0.client [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
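The image-cache entries above (acquire the lock named after "[datastore1] devstack-image-cache_base/<image-id>", download and copy the image only if it is missing, then release) follow a fetch-if-missing pattern serialized per image. Below is a minimal sketch of that pattern using oslo.concurrency's lock context manager; the in-memory _cache set and the download callable are hypothetical stand-ins for the datastore existence check and the HTTP transfer seen in the log.

```python
from oslo_concurrency import lockutils

_cache = set()  # stand-in for files already present in the datastore cache

def fetch_image_if_missing(image_id, download, datastore="datastore1"):
    """Sketch: the per-image cache path doubles as the lock name, so only one
    request downloads the image while concurrent requests wait and reuse it."""
    cache_path = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    with lockutils.lock(cache_path):
        if cache_path not in _cache:
            download(image_id, cache_path)  # caller-supplied fetch
            _cache.add(cache_path)
    return cache_path

print(fetch_image_if_missing(
    "75a6399b-5100-4c51-b5cf-162bd505a28f",
    download=lambda image_id, path: print("downloading", image_id, "to", path)))
```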
[ 606.741111] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.741261] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.741581] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 606.741848] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7852be60-6c3b-4e6f-b8a8-216f2af9fe7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.747818] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 606.747818] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523a5992-9c0c-cba7-1649-96236e5e1e46" [ 606.747818] env[65758]: _type = "Task" [ 606.747818] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.757899] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523a5992-9c0c-cba7-1649-96236e5e1e46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.818596] env[65758]: DEBUG oslo_vmware.api [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Task: {'id': task-4659863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243134} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.818854] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 606.819048] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 606.819227] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 606.819396] env[65758]: INFO nova.compute.manager [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Took 1.18 seconds to destroy the instance on the hypervisor. [ 606.819652] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 606.819856] env[65758]: DEBUG nova.compute.manager [-] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 606.819949] env[65758]: DEBUG nova.network.neutron [-] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 606.820235] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 606.820886] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 606.821228] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 606.854701] env[65758]: WARNING neutronclient.v2_0.client [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
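The entries from 606.196 through 606.820 above trace the teardown order for instance 67fdb417-62ea-412c-8b82-868d59149f89: power off, unregister the VM, delete its datastore directory, then deallocate the Neutron ports. Below is a minimal ordering sketch with hypothetical *_fn callables standing in for the vSphere and Neutron calls; the real sequence lives in nova.virt.vmwareapi.vmops and nova.compute.manager.

```python
def destroy_instance(instance_uuid, power_off_fn, unregister_fn,
                     delete_files_fn, deallocate_network_fn):
    # Ordering sketch only; each *_fn is a hypothetical stand-in.
    power_off_fn(instance_uuid)            # PowerOffVM_Task
    unregister_fn(instance_uuid)           # UnregisterVM
    delete_files_fn(instance_uuid)         # DeleteDatastoreFile_Task on the VM folder
    deallocate_network_fn(instance_uuid)   # deallocate_for_instance() in Neutron

destroy_instance(
    "67fdb417-62ea-412c-8b82-868d59149f89",
    power_off_fn=lambda u: print("powered off", u),
    unregister_fn=lambda u: print("unregistered", u),
    delete_files_fn=lambda u: print("deleted datastore files for", u),
    deallocate_network_fn=lambda u: print("deallocated network for", u))
```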
[ 606.855801] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 606.855929] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 607.004569] env[65758]: INFO nova.compute.manager [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Took 21.60 seconds to build instance. [ 607.007981] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.032268] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 607.238040] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659864, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.259582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.259974] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 607.261631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.302457] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "24379189-b10a-4ef6-a3f6-b7bb43029dab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.302705] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "24379189-b10a-4ef6-a3f6-b7bb43029dab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.510148] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26b8a7d8-f160-45d0-b90e-e6a99b636f1a tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "f1a1650b-4c45-47fc-9c45-f4625c959597" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.120s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.566199] env[65758]: WARNING neutronclient.v2_0.client [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 607.566199] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 607.566199] env[65758]: WARNING openstack [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 607.652408] env[65758]: DEBUG nova.compute.manager [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Received event network-vif-plugged-fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 607.652408] env[65758]: DEBUG oslo_concurrency.lockutils [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Acquiring lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.652408] env[65758]: DEBUG oslo_concurrency.lockutils [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.652408] env[65758]: DEBUG oslo_concurrency.lockutils [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.652408] env[65758]: DEBUG nova.compute.manager [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] No waiting events found dispatching network-vif-plugged-fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 607.652584] env[65758]: WARNING nova.compute.manager [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Received unexpected event network-vif-plugged-fb1e683c-095a-4512-a0a0-ec651a275ab8 for instance with vm_state building and task_state spawning. 
[ 607.652584] env[65758]: DEBUG nova.compute.manager [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Received event network-changed-fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 607.652584] env[65758]: DEBUG nova.compute.manager [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Refreshing instance network info cache due to event network-changed-fb1e683c-095a-4512-a0a0-ec651a275ab8. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 607.653123] env[65758]: DEBUG oslo_concurrency.lockutils [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Acquiring lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.653668] env[65758]: DEBUG oslo_concurrency.lockutils [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Acquired lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.654034] env[65758]: DEBUG nova.network.neutron [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Refreshing network info cache for port fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 607.739810] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659864, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.768310] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad434a3-f839-4f24-b14f-479835c7ffc2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.779175] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce87bb8d-6dbf-452c-8377-85b1cf9bf584 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.787054] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Updated VIF entry in instance network info cache for port f3319916-956f-49ba-9da5-ad0df9c5953c. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 607.787425] env[65758]: DEBUG nova.network.neutron [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Updating instance_info_cache with network_info: [{"id": "f3319916-956f-49ba-9da5-ad0df9c5953c", "address": "fa:16:3e:16:f7:af", "network": {"id": "eb6c4afb-5353-490f-877a-96738c4fa9e3", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1051072804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5af9eac7501241d68c48c140efa1a19b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41f66e20-fd86-4158-bbdc-7a150e85e844", "external-id": "nsx-vlan-transportzone-182", "segmentation_id": 182, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3319916-95", "ovs_interfaceid": "f3319916-956f-49ba-9da5-ad0df9c5953c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 607.816588] env[65758]: DEBUG oslo_concurrency.lockutils [req-7db6ab40-d867-4175-9afd-dae7cda6fb71 req-0f569779-053b-4bf4-a62a-d40618298e1d service nova] Releasing lock "refresh_cache-f1a1650b-4c45-47fc-9c45-f4625c959597" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.817629] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4fd258-fdf5-43ea-9aec-a1983b754105 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.826942] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da3f130-44c7-48ec-b4c0-8ca1f7687b64 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.845411] env[65758]: DEBUG nova.compute.provider_tree [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.909594] env[65758]: DEBUG nova.network.neutron [-] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 608.019187] env[65758]: DEBUG nova.compute.manager [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 608.162210] env[65758]: WARNING neutronclient.v2_0.client [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 608.163792] env[65758]: WARNING openstack [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 608.163792] env[65758]: WARNING openstack [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 608.231248] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659864, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.295719} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.233770] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Copied Virtual Disk [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk to [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 608.233969] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleting the datastore file [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f/tmp-sparse.vmdk {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 608.234723] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-983a07bc-f401-446f-8e0b-023e12e1254d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.242958] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 608.242958] env[65758]: value = "task-4659865" [ 608.242958] env[65758]: _type = "Task" [ 608.242958] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.255220] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659865, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.349133] env[65758]: DEBUG nova.scheduler.client.report [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 608.355837] env[65758]: DEBUG nova.compute.manager [req-cb4d21e5-37d5-44e5-a3db-593caac4ac10 req-1ce44fd6-cd57-42d6-b46c-a03daaaf34ee service nova] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Received event network-vif-deleted-bf0bd4f9-a022-486a-96ba-e2c684bfa941 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 608.394766] env[65758]: WARNING neutronclient.v2_0.client [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 608.398833] env[65758]: WARNING openstack [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 608.398833] env[65758]: WARNING openstack [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 608.412356] env[65758]: INFO nova.compute.manager [-] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Took 1.59 seconds to deallocate network for instance. 
[ 608.568030] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.755036] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659865, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043144} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.755278] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.756120] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Moving file from [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584/75a6399b-5100-4c51-b5cf-162bd505a28f to [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f. {{(pid=65758) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 608.756120] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-cb31d193-609f-4d48-9192-ec36cc42c37e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.764102] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 608.764102] env[65758]: value = "task-4659866" [ 608.764102] env[65758]: _type = "Task" [ 608.764102] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.773973] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659866, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.858891] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.859629] env[65758]: DEBUG nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 608.863252] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.415s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.864972] env[65758]: INFO nova.compute.claims [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.921286] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.995452] env[65758]: DEBUG nova.network.neutron [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updated VIF entry in instance network info cache for port fb1e683c-095a-4512-a0a0-ec651a275ab8. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 608.995905] env[65758]: DEBUG nova.network.neutron [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updating instance_info_cache with network_info: [{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 609.277223] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659866, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.036547} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.277507] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] File moved {{(pid=65758) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 609.278097] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Cleaning up location [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584 {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 609.278097] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleting the datastore file [datastore1] vmware_temp/dcd74ee7-3c00-4fdc-8b1d-5dd81e333584 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 609.279133] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76740a88-43ad-4851-b3fb-1fb9c318c2c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.288300] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 609.288300] env[65758]: value = "task-4659867" [ 609.288300] env[65758]: _type = "Task" [ 609.288300] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.298250] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659867, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.371551] env[65758]: DEBUG nova.compute.utils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 609.379202] env[65758]: DEBUG nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 609.379202] env[65758]: DEBUG nova.network.neutron [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 609.379582] env[65758]: WARNING neutronclient.v2_0.client [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 609.380122] env[65758]: WARNING neutronclient.v2_0.client [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 609.380959] env[65758]: WARNING openstack [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 609.381465] env[65758]: WARNING openstack [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 609.503806] env[65758]: DEBUG oslo_concurrency.lockutils [req-4158d7f7-7ab9-4b29-87d3-869243c60960 req-a06e94cf-b5bd-492a-afe6-9a7b42841841 service nova] Releasing lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.588499] env[65758]: DEBUG nova.policy [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68dfe33c4ea24a2e92287db5e8a54d40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdc5591f5fd643b7a836022e19f60b52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 609.732527] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5af5369-1277-46d1-a8a6-bad8aeed73cf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.741828] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-188d9590-3f12-4b1b-9159-723688402453 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.777442] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f54b26c-a2db-48c9-950f-c0240c0a64a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.787575] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439fc1c2-8dfc-454f-9a3e-4f6323206b83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.811061] env[65758]: DEBUG nova.compute.provider_tree [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.816690] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659867, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0851} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.819036] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 609.820494] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22ceeedf-f64f-443a-bbfc-2988ec5d2552 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.828389] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 609.828389] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522259b6-406b-234b-0608-67604af1f117" [ 609.828389] env[65758]: _type = "Task" [ 609.828389] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.844517] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522259b6-406b-234b-0608-67604af1f117, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.897409] env[65758]: DEBUG nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 610.137649] env[65758]: DEBUG nova.network.neutron [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Successfully created port: 9a9b1289-899b-4fe7-b1a8-cc090598a824 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 610.319512] env[65758]: DEBUG nova.scheduler.client.report [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 610.341423] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522259b6-406b-234b-0608-67604af1f117, 'name': SearchDatastore_Task, 'duration_secs': 0.013662} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.343101] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.343244] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 83fa942b-a195-4bcb-9ed5-5bb6764220a4/83fa942b-a195-4bcb-9ed5-5bb6764220a4.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.346618] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.346618] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.346744] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e04cc0c6-e309-49f6-8fc3-7a99335dd013 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.349516] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "f7a14628-cc55-41fa-ae89-3958855df8a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.349705] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "f7a14628-cc55-41fa-ae89-3958855df8a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.349900] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a25ffed6-c080-4c02-bc00-9b2b8b3064e1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.360554] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 610.360554] env[65758]: value = "task-4659868" [ 610.360554] env[65758]: _type = "Task" [ 610.360554] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.362035] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.362205] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.366208] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e743c7b8-1014-475e-8f67-66aac39de913 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.374180] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 610.374180] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fe63c8-7a59-13a8-2938-4f17c4b20dd9" [ 610.374180] env[65758]: _type = "Task" [ 610.374180] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.377010] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659868, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.388184] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fe63c8-7a59-13a8-2938-4f17c4b20dd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.446823] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquiring lock "e4540963-7be9-426e-90f8-b31524d2237b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.447136] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "e4540963-7be9-426e-90f8-b31524d2237b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.447345] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquiring lock "e4540963-7be9-426e-90f8-b31524d2237b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.447528] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "e4540963-7be9-426e-90f8-b31524d2237b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.449422] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "e4540963-7be9-426e-90f8-b31524d2237b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.452752] env[65758]: INFO nova.compute.manager [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] 
Terminating instance [ 610.825650] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.962s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.826322] env[65758]: DEBUG nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 610.831356] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.452s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.834097] env[65758]: INFO nova.compute.claims [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.876454] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659868, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.897057] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fe63c8-7a59-13a8-2938-4f17c4b20dd9, 'name': SearchDatastore_Task, 'duration_secs': 0.014693} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.897944] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d768e38-781e-4324-a702-638200664da5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.907469] env[65758]: DEBUG nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 610.911634] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 610.911634] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525f85a2-458a-2471-5049-7037c7e48b08" [ 610.911634] env[65758]: _type = "Task" [ 610.911634] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.923540] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525f85a2-458a-2471-5049-7037c7e48b08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.942042] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 610.942430] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 610.942729] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 610.942799] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 610.942917] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 610.943142] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 610.943271] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 610.943423] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 610.943584] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 610.943742] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 610.944028] env[65758]: DEBUG nova.virt.hardware [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 610.944975] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f961cb-4d0d-40b6-a583-9eeda3714126 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.957234] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc83edaf-89f8-4325-bc53-1d96dfe9ae37 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.962902] env[65758]: DEBUG nova.compute.manager [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 610.963141] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 610.964084] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1162430b-47ed-483b-9f93-c622306f7fd7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.986382] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 610.986682] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b038cf7f-53f6-474e-be48-4e308c2ca57c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.996677] env[65758]: DEBUG oslo_vmware.api [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 610.996677] env[65758]: value = "task-4659869" [ 610.996677] env[65758]: _type = "Task" [ 610.996677] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.010075] env[65758]: DEBUG oslo_vmware.api [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659869, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.341699] env[65758]: DEBUG nova.compute.utils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 611.343160] env[65758]: DEBUG nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 611.343366] env[65758]: DEBUG nova.network.neutron [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 611.343690] env[65758]: WARNING neutronclient.v2_0.client [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 611.343985] env[65758]: WARNING neutronclient.v2_0.client [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 611.344592] env[65758]: WARNING openstack [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 611.344931] env[65758]: WARNING openstack [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 611.374400] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659868, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701704} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.374650] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 83fa942b-a195-4bcb-9ed5-5bb6764220a4/83fa942b-a195-4bcb-9ed5-5bb6764220a4.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.374908] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.375205] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a2838e4-4b62-42d6-a100-14632fdb017c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.384576] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 611.384576] env[65758]: value = "task-4659870" [ 611.384576] env[65758]: _type = "Task" [ 611.384576] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.392878] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659870, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.420826] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525f85a2-458a-2471-5049-7037c7e48b08, 'name': SearchDatastore_Task, 'duration_secs': 0.067058} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.422057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.422057] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] adc1b956-1b5a-4272-b0ff-95a565e9c45c/adc1b956-1b5a-4272-b0ff-95a565e9c45c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 611.422057] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-745f1d54-0a9e-4fa1-8e11-a0137d5ffd56 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.428887] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 611.428887] env[65758]: value = "task-4659871" [ 611.428887] env[65758]: _type = "Task" [ 611.428887] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.439433] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659871, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.510940] env[65758]: DEBUG oslo_vmware.api [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659869, 'name': PowerOffVM_Task, 'duration_secs': 0.339017} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.511290] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 611.511580] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 611.511981] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c123bca-763f-4ff2-9307-1ed3011d9a67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.600626] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 611.600984] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 611.601267] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Deleting the datastore file [datastore2] e4540963-7be9-426e-90f8-b31524d2237b {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 611.601810] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7384512d-c52a-4927-8380-fb4d7c6ba36c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.611645] env[65758]: DEBUG oslo_vmware.api [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for the task: (returnval){ [ 611.611645] env[65758]: value = "task-4659873" [ 611.611645] env[65758]: _type = "Task" [ 611.611645] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.621564] env[65758]: DEBUG oslo_vmware.api [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659873, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.855180] env[65758]: DEBUG nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 611.899054] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659870, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.183293} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.901726] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 611.902834] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d129e0ee-b73f-4f56-aa7d-b3e5ea05ac72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.927694] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 83fa942b-a195-4bcb-9ed5-5bb6764220a4/83fa942b-a195-4bcb-9ed5-5bb6764220a4.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 611.931521] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4a6e050-30ce-4182-91a4-8057b734e91e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.956773] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659871, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.958476] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 611.958476] env[65758]: value = "task-4659874" [ 611.958476] env[65758]: _type = "Task" [ 611.958476] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.973746] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659874, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.122876] env[65758]: DEBUG oslo_vmware.api [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Task: {'id': task-4659873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427032} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.125555] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 612.125754] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 612.125926] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 612.126102] env[65758]: INFO nova.compute.manager [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 612.126351] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 612.126857] env[65758]: DEBUG nova.compute.manager [-] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 612.126959] env[65758]: DEBUG nova.network.neutron [-] [instance: e4540963-7be9-426e-90f8-b31524d2237b] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 612.127204] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 612.128045] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 612.128502] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 612.167392] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c82a656-3ff6-49a6-8644-b81861fe610e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.179602] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db31bfca-2780-4fd1-b46b-1acae98a2702 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.222588] env[65758]: DEBUG nova.network.neutron [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Successfully updated port: 9a9b1289-899b-4fe7-b1a8-cc090598a824 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 612.222588] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2469c02-0ce3-46fb-8f50-0d7150629512 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.235681] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a558a0-f965-4cf2-afce-f9a3695e384f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.252764] env[65758]: DEBUG nova.compute.provider_tree [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.327236] env[65758]: DEBUG nova.policy [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3338c19613c041abb681fa6cc661652a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e114eef3998848699a9a086fee86db29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 612.357727] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 612.460354] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659871, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576527} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.464897] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] adc1b956-1b5a-4272-b0ff-95a565e9c45c/adc1b956-1b5a-4272-b0ff-95a565e9c45c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 612.465370] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 612.465899] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c3581ad-6222-4315-b4a8-7acdd26f5b49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.476686] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659874, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.478102] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 612.478102] env[65758]: value = "task-4659875" [ 612.478102] env[65758]: _type = "Task" [ 612.478102] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.488695] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659875, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.726433] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquiring lock "refresh_cache-64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.726664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquired lock "refresh_cache-64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.726907] env[65758]: DEBUG nova.network.neutron [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 612.761266] env[65758]: DEBUG nova.scheduler.client.report [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 612.787192] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquiring lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.788029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.788029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquiring lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.788029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock 
"a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.788029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.790244] env[65758]: INFO nova.compute.manager [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Terminating instance [ 612.867983] env[65758]: DEBUG nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 612.905399] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 612.905843] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 612.906079] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 612.906271] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 612.906479] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 
tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 612.906540] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 612.906750] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.906944] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 612.907098] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 612.907265] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 612.907594] env[65758]: DEBUG nova.virt.hardware [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 612.909036] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854666e1-ac91-4dbf-9c3e-d52088898ffd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.918794] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bb1058-d388-4e00-805d-f36d3d0ebd77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.971238] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659874, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.990439] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080052} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.990964] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.992070] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9565a283-7f50-4197-b140-5d70bb0d6d95 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.016793] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] adc1b956-1b5a-4272-b0ff-95a565e9c45c/adc1b956-1b5a-4272-b0ff-95a565e9c45c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 613.017362] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2030f31b-e075-4ae4-aca8-8361284ec459 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.042017] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 613.042017] env[65758]: value = "task-4659876" [ 613.042017] env[65758]: _type = "Task" [ 613.042017] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.050713] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659876, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.169525] env[65758]: DEBUG nova.network.neutron [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Successfully created port: b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 613.231290] env[65758]: WARNING openstack [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 613.231770] env[65758]: WARNING openstack [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 613.267785] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.267785] env[65758]: DEBUG nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 613.270076] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.740s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.271718] env[65758]: INFO nova.compute.claims [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.294212] env[65758]: DEBUG nova.compute.manager [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 613.294760] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.298018] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3121e5ea-1ab2-47a5-8578-dd1ed72aa60a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.305424] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.305742] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc5d649b-21b7-4680-8776-194540366a47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.314583] env[65758]: DEBUG oslo_vmware.api [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 613.314583] env[65758]: value = "task-4659877" [ 613.314583] env[65758]: _type = "Task" [ 613.314583] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.330839] env[65758]: DEBUG oslo_vmware.api [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659877, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.375441] env[65758]: DEBUG nova.network.neutron [-] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 613.472967] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659874, 'name': ReconfigVM_Task, 'duration_secs': 1.051803} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.473172] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 83fa942b-a195-4bcb-9ed5-5bb6764220a4/83fa942b-a195-4bcb-9ed5-5bb6764220a4.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 613.474159] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9277663-8c3e-45a7-a654-7d49e02007d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.486033] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 613.486033] env[65758]: value = "task-4659878" [ 613.486033] env[65758]: _type = "Task" [ 613.486033] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.552587] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659876, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.780632] env[65758]: DEBUG nova.compute.utils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 613.785994] env[65758]: DEBUG nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 613.786226] env[65758]: DEBUG nova.network.neutron [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 613.786563] env[65758]: WARNING neutronclient.v2_0.client [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 613.786838] env[65758]: WARNING neutronclient.v2_0.client [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 613.787443] env[65758]: WARNING openstack [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 613.788276] env[65758]: WARNING openstack [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 613.828177] env[65758]: DEBUG oslo_vmware.api [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659877, 'name': PowerOffVM_Task, 'duration_secs': 0.263132} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.829102] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 613.829278] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 613.829547] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6763358-99ff-4f35-9892-ae5f1c389aa8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.879811] env[65758]: INFO nova.compute.manager [-] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Took 1.75 seconds to deallocate network for instance. 
[ 613.899270] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 613.899270] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 613.899456] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Deleting the datastore file [datastore2] a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.899594] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fb44578-319a-469f-b175-4d12784dbb27 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.911269] env[65758]: DEBUG oslo_vmware.api [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for the task: (returnval){ [ 613.911269] env[65758]: value = "task-4659880" [ 613.911269] env[65758]: _type = "Task" [ 613.911269] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.922297] env[65758]: DEBUG oslo_vmware.api [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659880, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.939948] env[65758]: DEBUG nova.network.neutron [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 614.000165] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659878, 'name': Rename_Task, 'duration_secs': 0.205704} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.000454] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.000727] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fb03979-348f-4100-980f-2d165a643a14 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.009390] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 614.009390] env[65758]: value = "task-4659881" [ 614.009390] env[65758]: _type = "Task" [ 614.009390] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.021024] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659881, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.053490] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659876, 'name': ReconfigVM_Task, 'duration_secs': 0.698728} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.054210] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Reconfigured VM instance instance-00000008 to attach disk [datastore1] adc1b956-1b5a-4272-b0ff-95a565e9c45c/adc1b956-1b5a-4272-b0ff-95a565e9c45c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 614.055529] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26cc08ca-09d9-4472-aef7-c6b03cbbd88d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.063567] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 614.063567] env[65758]: value = "task-4659882" [ 614.063567] env[65758]: _type = "Task" [ 614.063567] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.072705] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659882, 'name': Rename_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.286795] env[65758]: DEBUG nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 614.388286] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.422388] env[65758]: DEBUG oslo_vmware.api [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Task: {'id': task-4659880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173679} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.422669] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.422829] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.422994] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.423175] env[65758]: INFO nova.compute.manager [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Took 1.13 seconds to destroy the instance on the hypervisor. [ 614.423420] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 614.423614] env[65758]: DEBUG nova.compute.manager [-] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 614.423707] env[65758]: DEBUG nova.network.neutron [-] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 614.423952] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 614.424485] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 614.424781] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 614.529313] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659881, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.578762] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659882, 'name': Rename_Task, 'duration_secs': 0.219203} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.579890] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.579890] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-079404ad-2e36-460e-8f14-49af187e17e8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.590687] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 614.590687] env[65758]: value = "task-4659883" [ 614.590687] env[65758]: _type = "Task" [ 614.590687] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.607174] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659883, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.618816] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a6595e-6d32-4d1c-b650-724ec34c0e24 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.629496] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3aba35-c795-4e03-baa6-bfe5b1351cd2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.663392] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4418d284-e433-4f58-8f14-f38c2a4d0c7c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.672179] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8702c1-84b1-48e3-b785-b1a6aef15142 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.687623] env[65758]: DEBUG nova.compute.provider_tree [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.022753] env[65758]: DEBUG oslo_vmware.api [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659881, 'name': PowerOnVM_Task, 'duration_secs': 0.636308} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.023043] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 615.023228] env[65758]: INFO nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Took 14.17 seconds to spawn the instance on the hypervisor. 
[ 615.023395] env[65758]: DEBUG nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 615.024205] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e45b853-c4c9-40ae-8703-70dc79904651 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.106989] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659883, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.130084] env[65758]: DEBUG nova.policy [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '924e20ffeed74edfa75b67a62af149d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5fc14c9e85d404a8a6db0167ac84491', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 615.191141] env[65758]: DEBUG nova.scheduler.client.report [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 615.227454] env[65758]: DEBUG nova.network.neutron [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Successfully updated port: b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 615.297562] env[65758]: DEBUG nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 615.325726] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 615.330290] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 615.330563] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 615.330737] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 615.330924] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 615.331124] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 615.331281] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 615.331493] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 615.331738] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 615.331844] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Got 1 possible 
topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 615.332023] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 615.332194] env[65758]: DEBUG nova.virt.hardware [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 615.333093] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edcc74a-b33e-406f-b7d5-47093193183c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.341974] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13879989-fcdb-45e6-b9da-00264185489f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.547440] env[65758]: INFO nova.compute.manager [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Took 26.85 seconds to build instance. [ 615.610686] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659883, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.697240] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.697240] env[65758]: DEBUG nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 615.700376] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.522s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.701105] env[65758]: INFO nova.compute.claims [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.713339] env[65758]: DEBUG nova.network.neutron [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Successfully created port: 30a1632d-59ad-4b45-bb29-73404b1abc7c {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 615.730125] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.730418] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.730628] env[65758]: DEBUG nova.network.neutron [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 615.966889] env[65758]: WARNING neutronclient.v2_0.client [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 615.967569] env[65758]: WARNING openstack [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 615.967914] env[65758]: WARNING openstack [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 616.053655] env[65758]: DEBUG oslo_concurrency.lockutils [None req-734e5158-e9c1-46db-a81d-0da6fbd87f2b tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.363s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.107941] env[65758]: DEBUG oslo_vmware.api [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4659883, 'name': PowerOnVM_Task, 'duration_secs': 1.061267} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.108281] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 616.108551] env[65758]: INFO nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Took 12.83 seconds to spawn the instance on the hypervisor. 
[ 616.108778] env[65758]: DEBUG nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 616.109607] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cccad44-80ac-4eef-a16c-18d4a05c7aff {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.117967] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.118302] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.206363] env[65758]: DEBUG nova.compute.utils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 616.210340] env[65758]: DEBUG nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 616.210654] env[65758]: DEBUG nova.network.neutron [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 616.211571] env[65758]: WARNING neutronclient.v2_0.client [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 616.211924] env[65758]: WARNING neutronclient.v2_0.client [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 616.212509] env[65758]: WARNING openstack [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 616.212857] env[65758]: WARNING openstack [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 616.234348] env[65758]: WARNING openstack [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 616.234790] env[65758]: WARNING openstack [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 616.404320] env[65758]: DEBUG nova.network.neutron [-] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 616.479846] env[65758]: DEBUG nova.policy [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3a79fbdbdc4294a30f87eabe5719de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9aaf5b39abda42f28a847d5fe0d0ecec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 616.506659] env[65758]: DEBUG nova.network.neutron [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 616.555261] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4247bb44-c51b-4a13-940c-e32cd7f19785 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.559049] env[65758]: DEBUG nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 616.571590] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180da6e5-87f7-4559-af01-bde746ca65b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.614992] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd0fc0e-0e76-43df-9482-c88f39635477 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.627087] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6e76a1-86c4-4f6c-8dde-b4b32529e0ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.640316] env[65758]: INFO nova.compute.manager [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Took 23.61 seconds to build instance. [ 616.653478] env[65758]: DEBUG nova.compute.provider_tree [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.711773] env[65758]: DEBUG nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 616.907193] env[65758]: INFO nova.compute.manager [-] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Took 2.48 seconds to deallocate network for instance. 
[ 617.087940] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.156228] env[65758]: DEBUG oslo_concurrency.lockutils [None req-575a4ced-698a-4488-9ce4-f00e86984b64 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.136s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.157115] env[65758]: DEBUG nova.scheduler.client.report [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 617.382125] env[65758]: DEBUG nova.network.neutron [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Updating instance_info_cache with network_info: [{"id": "9a9b1289-899b-4fe7-b1a8-cc090598a824", "address": "fa:16:3e:b4:9a:30", "network": {"id": "b386f8af-e1bb-4d47-8e97-5bf03a5c9af1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1680045637-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc5591f5fd643b7a836022e19f60b52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9b1289-89", "ovs_interfaceid": "9a9b1289-899b-4fe7-b1a8-cc090598a824", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 617.416011] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.488405] env[65758]: DEBUG nova.network.neutron [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Successfully updated port: 30a1632d-59ad-4b45-bb29-73404b1abc7c {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 617.653795] env[65758]: WARNING neutronclient.v2_0.client [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 617.654479] env[65758]: WARNING openstack [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 617.654957] env[65758]: WARNING openstack [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 617.664025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.964s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.664492] env[65758]: DEBUG nova.compute.manager [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 617.667642] env[65758]: DEBUG nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 617.670793] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.499s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.673972] env[65758]: INFO nova.compute.claims [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.723377] env[65758]: DEBUG nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 617.765656] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 617.765911] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 617.766075] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 617.766439] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 617.766439] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 617.766558] env[65758]: DEBUG nova.virt.hardware [None 
req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 617.766715] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.766865] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 617.767445] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 617.767885] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 617.768113] env[65758]: DEBUG nova.virt.hardware [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 617.769682] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422efe35-e8b3-42fd-a900-b67721c33167 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.774349] env[65758]: DEBUG nova.network.neutron [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Successfully created port: df4cf195-46a9-4de5-ae34-2363de4377f0 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 617.784220] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65dcc2cd-3ee8-4977-83d7-d9f8bd306b40 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.813483] env[65758]: DEBUG nova.network.neutron [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Updating instance_info_cache with network_info: [{"id": "b574c870-790b-4dad-8dce-58d93bb6fe44", "address": "fa:16:3e:62:75:62", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb574c870-79", "ovs_interfaceid": "b574c870-790b-4dad-8dce-58d93bb6fe44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 617.886639] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Releasing lock "refresh_cache-64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.886639] env[65758]: DEBUG nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Instance network_info: |[{"id": "9a9b1289-899b-4fe7-b1a8-cc090598a824", "address": "fa:16:3e:b4:9a:30", "network": {"id": "b386f8af-e1bb-4d47-8e97-5bf03a5c9af1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1680045637-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc5591f5fd643b7a836022e19f60b52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9b1289-89", "ovs_interfaceid": "9a9b1289-899b-4fe7-b1a8-cc090598a824", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 617.886856] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:9a:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a9b1289-899b-4fe7-b1a8-cc090598a824', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.899874] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Creating folder: Project (bdc5591f5fd643b7a836022e19f60b52). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.904054] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7e630a7-e0d4-4df9-961e-6ec68814d488 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.918272] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Created folder: Project (bdc5591f5fd643b7a836022e19f60b52) in parent group-v909763. [ 617.918469] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Creating folder: Instances. Parent ref: group-v909788. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.918736] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51a8cb6f-32ef-4e20-86a8-6170da90f411 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.931948] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Created folder: Instances in parent group-v909788. [ 617.932234] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 617.932449] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 617.932682] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcbcab1f-250a-4974-97b9-498e6f712538 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.952795] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.952795] env[65758]: value = "task-4659886" [ 617.952795] env[65758]: _type = "Task" [ 617.952795] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.963424] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659886, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.993092] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquiring lock "refresh_cache-e48a075b-41b3-4612-bd5f-0a158d707a2f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.993092] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquired lock "refresh_cache-e48a075b-41b3-4612-bd5f-0a158d707a2f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.993092] env[65758]: DEBUG nova.network.neutron [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 618.173478] env[65758]: DEBUG nova.compute.utils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 618.177514] env[65758]: DEBUG nova.compute.manager [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 618.177514] env[65758]: DEBUG nova.network.neutron [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 618.177846] env[65758]: WARNING neutronclient.v2_0.client [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 618.178655] env[65758]: WARNING neutronclient.v2_0.client [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 618.179191] env[65758]: WARNING openstack [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 618.179274] env[65758]: WARNING openstack [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 618.221165] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.317622] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.318163] env[65758]: DEBUG nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Instance network_info: |[{"id": "b574c870-790b-4dad-8dce-58d93bb6fe44", "address": "fa:16:3e:62:75:62", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb574c870-79", "ovs_interfaceid": "b574c870-790b-4dad-8dce-58d93bb6fe44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 618.318861] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 
37aadd44-79e8-4479-862f-265549c9d802] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:75:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b574c870-790b-4dad-8dce-58d93bb6fe44', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 618.330088] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Creating folder: Project (e114eef3998848699a9a086fee86db29). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 618.330629] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0cdb0eb-ad9f-4067-91ef-def79a68b152 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.344661] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Created folder: Project (e114eef3998848699a9a086fee86db29) in parent group-v909763. [ 618.345110] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Creating folder: Instances. Parent ref: group-v909791. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 618.345912] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e86dee6-cd1a-4316-ac92-4f7eba381728 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.364320] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Created folder: Instances in parent group-v909791. [ 618.366692] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 618.366692] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 618.366692] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee3f9cb9-9b0a-4fba-8603-93365dde498a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.390804] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 618.390804] env[65758]: value = "task-4659889" [ 618.390804] env[65758]: _type = "Task" [ 618.390804] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.399848] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659889, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.464030] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659886, 'name': CreateVM_Task, 'duration_secs': 0.387772} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.464227] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 618.464728] env[65758]: WARNING neutronclient.v2_0.client [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 618.465111] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.465274] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.465612] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 618.465892] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2546f0c2-30a7-47bc-85fb-5ffdd2c3cf0c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.472038] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 618.472038] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529ac2e6-fcb2-bc04-1c4a-a9142a55acef" [ 618.472038] env[65758]: _type = "Task" [ 618.472038] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.479418] env[65758]: DEBUG nova.policy [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b98acb7287a94d7a8da26107eecdecf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd100ba970de24698aff03c4c537b3c18', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 618.490036] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529ac2e6-fcb2-bc04-1c4a-a9142a55acef, 'name': SearchDatastore_Task, 'duration_secs': 0.010301} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.490036] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.490036] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.490036] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.490311] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.490311] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 618.490388] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18af0715-955e-49de-800c-f1caa183471f 
{{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.495364] env[65758]: WARNING openstack [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 618.495723] env[65758]: WARNING openstack [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 618.506380] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 618.507045] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 618.507573] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4714488e-8455-48e7-a1a1-5927dec1f33c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.516040] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 618.516040] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52390c92-9405-4410-0c8b-4d0d60254803" [ 618.516040] env[65758]: _type = "Task" [ 618.516040] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.527097] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52390c92-9405-4410-0c8b-4d0d60254803, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.692019] env[65758]: DEBUG nova.compute.manager [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 618.798015] env[65758]: DEBUG nova.network.neutron [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 618.846348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquiring lock "f1a1650b-4c45-47fc-9c45-f4625c959597" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.846348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "f1a1650b-4c45-47fc-9c45-f4625c959597" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.846348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquiring lock "f1a1650b-4c45-47fc-9c45-f4625c959597-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.846849] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "f1a1650b-4c45-47fc-9c45-f4625c959597-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.846849] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "f1a1650b-4c45-47fc-9c45-f4625c959597-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.853770] env[65758]: INFO nova.compute.manager [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Terminating instance [ 618.907844] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659889, 'name': CreateVM_Task, 'duration_secs': 0.425955} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.907844] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 618.907844] env[65758]: WARNING neutronclient.v2_0.client [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 618.908095] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.908309] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.908664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 618.908801] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3c249c5-fe06-49fb-9d25-58db9be9658f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.915347] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 618.915347] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5207f227-f3d8-9790-7469-74a61a38fd9c" [ 618.915347] env[65758]: _type = "Task" [ 618.915347] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.925558] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5207f227-f3d8-9790-7469-74a61a38fd9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.028426] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52390c92-9405-4410-0c8b-4d0d60254803, 'name': SearchDatastore_Task, 'duration_secs': 0.011175} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.029704] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-442ca2db-8d29-446c-810d-27c1e67f2231 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.036106] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 619.036106] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b08b26-8041-59d5-2ef7-a5a1dee5dd4d" [ 619.036106] env[65758]: _type = "Task" [ 619.036106] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.045733] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b08b26-8041-59d5-2ef7-a5a1dee5dd4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.061354] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74985f5-4cbc-4a32-b16c-d0d5025e6845 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.069781] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fe8d71-be70-43dd-a9ea-b0cc6e7b06a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.105366] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ff224f-b11f-4fa6-b416-150c863c346a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.114011] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b02208-3cb3-4129-bf1d-f03681133208 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.118755] env[65758]: DEBUG nova.network.neutron [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Successfully created port: afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 619.133486] env[65758]: DEBUG nova.compute.provider_tree [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.362366] env[65758]: DEBUG nova.compute.manager [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 619.362366] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.364281] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9df702f-2236-4d4e-a26d-7f5303ca3365 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.375043] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 619.375992] env[65758]: DEBUG nova.network.neutron [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Successfully updated port: df4cf195-46a9-4de5-ae34-2363de4377f0 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 619.377681] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b603724-3f4c-41cd-a75e-971b1c396d29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.387027] env[65758]: DEBUG oslo_vmware.api [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 619.387027] env[65758]: value = "task-4659890" [ 619.387027] env[65758]: _type = "Task" [ 619.387027] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.397576] env[65758]: DEBUG oslo_vmware.api [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659890, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.431036] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5207f227-f3d8-9790-7469-74a61a38fd9c, 'name': SearchDatastore_Task, 'duration_secs': 0.010874} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.431322] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.431602] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 619.431860] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.431995] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.432180] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 619.432450] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2091056-273e-4d2c-90a7-1f97385e13d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.444212] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 619.444396] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 619.445240] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f353730f-5473-4b74-8621-dafa1fffea53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.452256] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 619.452256] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e75aae-9059-9c35-ed26-324abb53e687" [ 619.452256] env[65758]: _type = "Task" [ 619.452256] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.462325] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e75aae-9059-9c35-ed26-324abb53e687, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.552666] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b08b26-8041-59d5-2ef7-a5a1dee5dd4d, 'name': SearchDatastore_Task, 'duration_secs': 0.010379} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.553412] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.553412] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b/64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 619.553541] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62861ade-c536-4a59-a8e4-2f8c93152f31 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.562257] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 619.562257] env[65758]: value = "task-4659891" [ 619.562257] env[65758]: _type = "Task" [ 619.562257] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.572441] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659891, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.639965] env[65758]: DEBUG nova.scheduler.client.report [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 619.702956] env[65758]: DEBUG nova.compute.manager [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 619.737702] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 619.738057] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.738337] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 619.738337] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] 
Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.738576] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 619.738638] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 619.739272] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 619.739272] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 619.739272] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 619.739272] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 619.739455] env[65758]: DEBUG nova.virt.hardware [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 619.740352] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9b8946-2024-457b-90b6-7082d4c66c31 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.749824] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec66c96-bc15-42e5-9a57-f9aef0aa81c7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.880551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "refresh_cache-2d787237-26e5-4519-9f6e-1d30b9d016cf" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
619.880868] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "refresh_cache-2d787237-26e5-4519-9f6e-1d30b9d016cf" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.880951] env[65758]: DEBUG nova.network.neutron [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 619.898937] env[65758]: DEBUG oslo_vmware.api [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659890, 'name': PowerOffVM_Task, 'duration_secs': 0.319763} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.899043] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 619.899919] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 619.899919] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c352696-f771-4cf3-9979-e9216684dcd5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.909236] env[65758]: WARNING neutronclient.v2_0.client [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
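Several of the surrounding entries come from one poll loop: a vCenter task is created, "progress is N%." lines repeat, and a final "completed successfully" line reports duration_secs. A stdlib-only sketch of such a poll-until-complete loop is below; the FakeTask class, its fields, and the poll interval are assumptions for illustration, not the oslo.vmware API.

import time


class FakeTask:
    # Stand-in for a remote task handle; a real one would be queried over the
    # vSphere API rather than advancing locally.
    def __init__(self, task_id, name, steps=3):
        self.task_id = task_id
        self.name = name
        self._steps = steps
        self._polled = 0

    def poll(self):
        # Each poll advances progress toward completion.
        self._polled += 1
        progress = min(100, int(100 * self._polled / self._steps))
        return progress, progress >= 100


def wait_for_completion(task, interval=0.1):
    # Poll until the task reports completion, logging progress in the same
    # shape as the "_poll_task" entries above.
    started = time.monotonic()
    while True:
        progress, done = task.poll()
        if done:
            duration = time.monotonic() - started
            print(f"Task: {{'id': {task.task_id}, 'name': {task.name}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return
        print(f"Task: {{'id': {task.task_id}, 'name': {task.name}}} "
              f"progress is {progress}%.")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_completion(FakeTask("task-0000000", "PowerOffVM_Task"))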
[ 619.910215] env[65758]: WARNING openstack [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 619.910215] env[65758]: WARNING openstack [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 619.968317] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e75aae-9059-9c35-ed26-324abb53e687, 'name': SearchDatastore_Task, 'duration_secs': 0.009833} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.969614] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-007d4e4f-cd46-4d55-bd36-815bdc008c58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.978796] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 619.978796] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bfb945-82eb-7031-e1bb-aaf38458e406" [ 619.978796] env[65758]: _type = "Task" [ 619.978796] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.985087] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 619.985346] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 619.985527] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Deleting the datastore file [datastore2] f1a1650b-4c45-47fc-9c45-f4625c959597 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 619.986275] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-276f5417-173d-45e5-b14f-5bb7ab9c51fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.991782] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bfb945-82eb-7031-e1bb-aaf38458e406, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.998797] env[65758]: DEBUG oslo_vmware.api [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for the task: (returnval){ [ 619.998797] env[65758]: value = "task-4659893" [ 619.998797] env[65758]: _type = "Task" [ 619.998797] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.010311] env[65758]: DEBUG oslo_vmware.api [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659893, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.074445] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659891, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.147746] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 620.147746] env[65758]: DEBUG nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 620.149800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.142s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.151293] env[65758]: INFO nova.compute.claims [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 620.384984] env[65758]: WARNING openstack [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 620.385456] env[65758]: WARNING openstack [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 620.490214] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bfb945-82eb-7031-e1bb-aaf38458e406, 'name': SearchDatastore_Task, 'duration_secs': 0.038699} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.490529] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.490763] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 37aadd44-79e8-4479-862f-265549c9d802/37aadd44-79e8-4479-862f-265549c9d802.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 620.491039] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c7dbdbb-e5d1-41fe-9320-936f83b7759d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.498778] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 620.498778] env[65758]: value = "task-4659894" [ 620.498778] env[65758]: _type = "Task" [ 620.498778] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.510715] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.514590] env[65758]: DEBUG oslo_vmware.api [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Task: {'id': task-4659893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285534} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.514984] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 620.515256] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 620.515525] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.515797] env[65758]: INFO nova.compute.manager [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Took 1.15 seconds to destroy the instance on the hypervisor. [ 620.516418] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 620.516609] env[65758]: DEBUG nova.compute.manager [-] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 620.516609] env[65758]: DEBUG nova.network.neutron [-] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 620.517368] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
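The terminate path for instance f1a1650b-4c45-47fc-9c45-f4625c959597 runs a fixed sequence: power off the VM, unregister it, delete its files from the datastore, report how long the hypervisor-side destroy took, then deallocate the network. A small sketch of that ordering, using caller-supplied placeholder callables instead of real vSphere and Neutron calls (all names here are illustrative assumptions):

import time


def destroy_instance(instance_uuid, power_off, unregister, delete_files,
                     deallocate_network):
    # Run the teardown steps in the order the entries above show; every
    # argument after instance_uuid is a callable standing in for the real
    # driver or network operation.
    started = time.monotonic()

    power_off(instance_uuid)       # "Powering off the VM" / "Powered off the VM"
    unregister(instance_uuid)      # "Unregistering the VM" / "Unregistered the VM"
    delete_files(instance_uuid)    # "Deleting the datastore file ..." / "Deleted ..."

    elapsed = time.monotonic() - started
    print(f"Took {elapsed:.2f} seconds to destroy the instance on the hypervisor.")

    deallocate_network(instance_uuid)  # "Deallocating network for instance"


if __name__ == "__main__":
    noop = lambda uuid: None
    destroy_instance("f1a1650b-4c45-47fc-9c45-f4625c959597",
                     power_off=noop, unregister=noop,
                     delete_files=noop, deallocate_network=noop)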
[ 620.517597] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 620.517926] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 620.576381] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534107} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.576683] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b/64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 620.576938] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.577252] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db05f8a4-5edb-47ab-811a-9c215b6f46aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.586551] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 620.586551] env[65758]: value = "task-4659895" [ 620.586551] env[65758]: _type = "Task" [ 620.586551] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.597325] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659895, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.656137] env[65758]: DEBUG nova.compute.utils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 620.661031] env[65758]: DEBUG nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 620.661031] env[65758]: DEBUG nova.network.neutron [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 620.661031] env[65758]: WARNING neutronclient.v2_0.client [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 620.661272] env[65758]: WARNING neutronclient.v2_0.client [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 620.661920] env[65758]: WARNING openstack [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 620.662287] env[65758]: WARNING openstack [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 620.763695] env[65758]: DEBUG nova.network.neutron [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 620.833830] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
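The CopyVirtualDisk_Task / ExtendVirtualDisk_Task records above follow oslo.vmware's invoke-then-poll pattern: a SOAP task method is invoked and the session waits for the returned task object. The sketch below is only a minimal illustration of that pattern; the vCenter host, credentials, datastore path, datacenter moref and disk size are placeholders, not values from this log.

```python
# Illustrative sketch only: issuing a VMware disk task and polling it with
# oslo.vmware, matching the "Waiting for the task ... to complete" and
# "_poll_task" records above. All connection and disk parameters are
# placeholders.
from oslo_vmware import api, vim_util


def extend_disk_example():
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    # Placeholder datacenter managed-object reference.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name='[datastore1] example/example.vmdk',
        datacenter=dc_ref,
        newCapacityKb=1048576, eagerZero=False)

    # wait_for_task() polls the task object until it reports success or
    # raises on error, which is what the _poll_task log lines reflect.
    session.wait_for_task(task)
```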
[ 620.976183] env[65758]: DEBUG nova.network.neutron [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Successfully updated port: afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 620.982782] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522b06c9-87ec-49ba-b51a-cfe062db65ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.992775] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f20184-b20a-4961-b778-2e43c85c6bc5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.040425] env[65758]: DEBUG nova.policy [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3a79fbdbdc4294a30f87eabe5719de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9aaf5b39abda42f28a847d5fe0d0ecec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 621.043769] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04da9d5-d6ca-445a-8b9b-6a68a068ebd5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.051140] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543474} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.053491] env[65758]: DEBUG nova.network.neutron [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Updating instance_info_cache with network_info: [{"id": "30a1632d-59ad-4b45-bb29-73404b1abc7c", "address": "fa:16:3e:24:e3:7f", "network": {"id": "1b7f73a4-ad38-4ec6-8c7a-4bd567147409", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1265965383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e5fc14c9e85d404a8a6db0167ac84491", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a1632d-59", "ovs_interfaceid": "30a1632d-59ad-4b45-bb29-73404b1abc7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 621.055854] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 37aadd44-79e8-4479-862f-265549c9d802/37aadd44-79e8-4479-862f-265549c9d802.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 621.056136] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 621.056654] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a1393b6-75ea-4981-a4aa-eddf571b7ae0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.062342] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62581df1-69ea-450f-af74-919351e5d2b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.068710] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 621.068710] env[65758]: value = "task-4659896" [ 621.068710] env[65758]: _type = "Task" [ 621.068710] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.081830] env[65758]: DEBUG nova.compute.provider_tree [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.088695] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659896, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.099321] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077433} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.102371] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 621.103613] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e3960d-2080-48d0-a953-c8fd4f338bca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.129125] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b/64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 621.129483] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1a6af21-3f80-426d-b882-2814c5c62d6e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.149691] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 621.149691] env[65758]: value = "task-4659897" [ 621.149691] env[65758]: _type = "Task" [ 621.149691] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.159885] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659897, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.160438] env[65758]: DEBUG nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 621.475685] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquiring lock "28ccc013-962d-4607-83a2-5fcd480c27b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.476104] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "28ccc013-962d-4607-83a2-5fcd480c27b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.479038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquiring lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.479207] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquired lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.479384] env[65758]: DEBUG nova.network.neutron [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 621.558261] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Releasing lock "refresh_cache-e48a075b-41b3-4612-bd5f-0a158d707a2f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.558261] env[65758]: DEBUG nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Instance network_info: |[{"id": "30a1632d-59ad-4b45-bb29-73404b1abc7c", "address": "fa:16:3e:24:e3:7f", "network": {"id": "1b7f73a4-ad38-4ec6-8c7a-4bd567147409", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1265965383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 
4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e5fc14c9e85d404a8a6db0167ac84491", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a1632d-59", "ovs_interfaceid": "30a1632d-59ad-4b45-bb29-73404b1abc7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 621.558693] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:e3:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7894814c-6be3-4b80-a08e-4a771bc05dd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30a1632d-59ad-4b45-bb29-73404b1abc7c', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.566088] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Creating folder: Project (e5fc14c9e85d404a8a6db0167ac84491). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.566436] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f7bcfab-3777-4483-9749-4534856ff47d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.580760] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659896, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067945} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.582899] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 621.583839] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Created folder: Project (e5fc14c9e85d404a8a6db0167ac84491) in parent group-v909763. [ 621.583839] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Creating folder: Instances. Parent ref: group-v909794. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.585191] env[65758]: DEBUG nova.scheduler.client.report [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 621.588645] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efd4e5c-f8c0-4d3f-a0f2-5bf00f204ada {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.591640] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fbf525e-0041-4816-8e7d-4a9d1be2ef30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.616267] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 37aadd44-79e8-4479-862f-265549c9d802/37aadd44-79e8-4479-862f-265549c9d802.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 621.618949] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f9649c8-9e3c-48f2-b535-8eb379fd8d7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.633679] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Created folder: Instances in parent group-v909794. [ 621.633958] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 621.634963] env[65758]: DEBUG nova.network.neutron [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Successfully created port: 09d73b49-88a0-426f-915b-c6c03998738f {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 621.637393] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 621.638784] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9aa9bb3b-4a5d-4989-8635-a50466f51f69 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.655445] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 621.655445] env[65758]: value = "task-4659900" [ 621.655445] env[65758]: _type = "Task" [ 621.655445] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.675559] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.675559] env[65758]: value = "task-4659901" [ 621.675559] env[65758]: _type = "Task" [ 621.675559] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.675873] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659897, 'name': ReconfigVM_Task, 'duration_secs': 0.35263} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.676116] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659900, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.676763] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b/64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.680910] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51e2d82a-1c89-484b-abfb-a385422f14ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.688809] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659901, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.693227] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 621.693227] env[65758]: value = "task-4659902" [ 621.693227] env[65758]: _type = "Task" [ 621.693227] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.703219] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659902, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.815042] env[65758]: DEBUG nova.network.neutron [-] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 621.864995] env[65758]: WARNING neutronclient.v2_0.client [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 621.865920] env[65758]: WARNING openstack [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 621.866392] env[65758]: WARNING openstack [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 621.982512] env[65758]: WARNING openstack [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 621.983054] env[65758]: WARNING openstack [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 622.047324] env[65758]: DEBUG nova.network.neutron [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 
tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Updating instance_info_cache with network_info: [{"id": "df4cf195-46a9-4de5-ae34-2363de4377f0", "address": "fa:16:3e:a2:35:0f", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf4cf195-46", "ovs_interfaceid": "df4cf195-46a9-4de5-ae34-2363de4377f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 622.075980] env[65758]: DEBUG nova.network.neutron [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 622.096165] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.946s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.096753] env[65758]: DEBUG nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 622.099323] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.532s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.100870] env[65758]: INFO nova.compute.claims [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.169642] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659900, 'name': ReconfigVM_Task, 'duration_secs': 0.352567} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.169966] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 37aadd44-79e8-4479-862f-265549c9d802/37aadd44-79e8-4479-862f-265549c9d802.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 622.170593] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5e89c98-3bea-4ec5-8eba-8bca2ee2e8d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.178634] env[65758]: DEBUG nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 622.181034] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 622.181034] env[65758]: value = "task-4659903" [ 622.181034] env[65758]: _type = "Task" [ 622.181034] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.194165] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659901, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.201569] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659903, 'name': Rename_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.210243] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659902, 'name': Rename_Task, 'duration_secs': 0.174903} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.219302] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 622.219302] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 622.219302] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 622.219302] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 622.219549] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 622.219549] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 622.219549] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 
622.219549] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 622.219549] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 622.219748] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 622.219968] env[65758]: DEBUG nova.virt.hardware [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 622.220379] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 622.221420] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c0ef05-20c2-4999-860f-2bb6b241184f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.224927] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1ad1f3a-b593-4346-a4fb-21495117b9da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.235740] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee0c5d7-0b74-42a2-8cf9-4303366acc80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.240074] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 622.240074] env[65758]: value = "task-4659904" [ 622.240074] env[65758]: _type = "Task" [ 622.240074] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.261216] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659904, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.320817] env[65758]: INFO nova.compute.manager [-] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Took 1.80 seconds to deallocate network for instance. 
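The "Acquiring lock ... / acquired ... / released" records scattered through this section are emitted by oslo.concurrency's lock helpers at DEBUG level. As a hedged, generic illustration only (the lock names below are placeholders, not the ones nova registers), the sketch shows the two usual usage patterns that produce such records.

```python
# Illustrative sketch only: the named-lock helpers from oslo.concurrency that
# generate "Acquiring lock ... / Lock acquired ... / released" DEBUG records
# like the ones in this log. Lock names here are placeholders.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources_example')
def update_usage_example():
    # Runs with the named in-process lock held; acquisition and release are
    # logged by oslo.concurrency at DEBUG.
    return 'updated'


def event_lock_example(instance_uuid):
    # The same helper is also available as a context manager.
    with lockutils.lock('%s-events' % instance_uuid):
        return 'event handled'


if __name__ == '__main__':
    print(update_usage_example())
    print(event_lock_example('64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b'))
```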
[ 622.345663] env[65758]: WARNING neutronclient.v2_0.client [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 622.346754] env[65758]: WARNING openstack [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 622.347400] env[65758]: WARNING openstack [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 622.427952] env[65758]: DEBUG nova.compute.manager [req-7c1c3300-ee80-43d8-bacc-2cf85fa8d0bc req-f8030ffd-10a6-4e49-8c21-7df1c7912622 service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Received event network-vif-plugged-9a9b1289-899b-4fe7-b1a8-cc090598a824 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 622.428544] env[65758]: DEBUG oslo_concurrency.lockutils [req-7c1c3300-ee80-43d8-bacc-2cf85fa8d0bc req-f8030ffd-10a6-4e49-8c21-7df1c7912622 service nova] Acquiring lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.428859] env[65758]: DEBUG oslo_concurrency.lockutils [req-7c1c3300-ee80-43d8-bacc-2cf85fa8d0bc req-f8030ffd-10a6-4e49-8c21-7df1c7912622 service nova] Lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.429080] env[65758]: DEBUG oslo_concurrency.lockutils [req-7c1c3300-ee80-43d8-bacc-2cf85fa8d0bc req-f8030ffd-10a6-4e49-8c21-7df1c7912622 service nova] Lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.429326] env[65758]: DEBUG nova.compute.manager [req-7c1c3300-ee80-43d8-bacc-2cf85fa8d0bc req-f8030ffd-10a6-4e49-8c21-7df1c7912622 service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] No waiting events found dispatching network-vif-plugged-9a9b1289-899b-4fe7-b1a8-cc090598a824 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 622.429530] env[65758]: WARNING nova.compute.manager [req-7c1c3300-ee80-43d8-bacc-2cf85fa8d0bc req-f8030ffd-10a6-4e49-8c21-7df1c7912622 service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Received unexpected event network-vif-plugged-9a9b1289-899b-4fe7-b1a8-cc090598a824 for 
instance with vm_state building and task_state spawning. [ 622.500665] env[65758]: DEBUG nova.network.neutron [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Updating instance_info_cache with network_info: [{"id": "afb9abca-e097-4678-9ae2-5b3775cf16e9", "address": "fa:16:3e:dd:80:22", "network": {"id": "dec9f876-3382-4488-90e2-702f201ed688", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-107507873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d100ba970de24698aff03c4c537b3c18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb9abca-e0", "ovs_interfaceid": "afb9abca-e097-4678-9ae2-5b3775cf16e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 622.550030] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "refresh_cache-2d787237-26e5-4519-9f6e-1d30b9d016cf" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.550195] env[65758]: DEBUG nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance network_info: |[{"id": "df4cf195-46a9-4de5-ae34-2363de4377f0", "address": "fa:16:3e:a2:35:0f", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf4cf195-46", "ovs_interfaceid": "df4cf195-46a9-4de5-ae34-2363de4377f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 622.550743] env[65758]: DEBUG 
nova.virt.vmwareapi.vmops [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:35:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df4cf195-46a9-4de5-ae34-2363de4377f0', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 622.559272] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Creating folder: Project (9aaf5b39abda42f28a847d5fe0d0ecec). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.559998] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0296180-948f-467d-8703-6d4e0472c19d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.575292] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Created folder: Project (9aaf5b39abda42f28a847d5fe0d0ecec) in parent group-v909763. [ 622.575292] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Creating folder: Instances. Parent ref: group-v909797. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.575292] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52b962cb-de76-4644-9799-2fa8341c0c73 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.586356] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Created folder: Instances in parent group-v909797. [ 622.586525] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 622.587050] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 622.587050] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7631d2c-40e4-4868-961f-28699ef26c81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.605993] env[65758]: DEBUG nova.compute.utils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 622.614937] env[65758]: DEBUG nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 622.614937] env[65758]: DEBUG nova.network.neutron [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 622.614937] env[65758]: WARNING neutronclient.v2_0.client [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 622.615124] env[65758]: WARNING neutronclient.v2_0.client [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 622.615731] env[65758]: WARNING openstack [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 622.616132] env[65758]: WARNING openstack [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 622.624054] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.624054] env[65758]: value = "task-4659907" [ 622.624054] env[65758]: _type = "Task" [ 622.624054] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.639898] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659907, 'name': CreateVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.705552] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659901, 'name': CreateVM_Task, 'duration_secs': 0.787455} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.713384] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 622.713799] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659903, 'name': Rename_Task, 'duration_secs': 0.179177} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.714592] env[65758]: WARNING neutronclient.v2_0.client [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 622.715071] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.715153] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.715552] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 622.715901] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 622.716231] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edb05859-5995-42ed-bb59-f5570ab65560 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.718464] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38f96b71-edf8-4d72-99e5-5f66ea80e419 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.726083] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 622.726083] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fe3cd8-594f-0f9b-c316-2f847d23bc38" [ 622.726083] env[65758]: _type = "Task" [ 622.726083] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.734883] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 622.734883] env[65758]: value = "task-4659908" [ 622.734883] env[65758]: _type = "Task" [ 622.734883] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.744731] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fe3cd8-594f-0f9b-c316-2f847d23bc38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.763649] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659908, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.763903] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659904, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.801985] env[65758]: DEBUG nova.policy [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b6e413458a84a9b8f2b6dcd0061fc33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd550f85853f447bb91a89b6bc6c5720', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 622.829675] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.981226] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d6767c-9baa-444c-8f23-8418c1eaaa21 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.990886] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4a1f28-48a5-4df6-84c2-f871cd03221d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.022797] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Releasing lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.023128] env[65758]: DEBUG nova.compute.manager [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Instance network_info: |[{"id": "afb9abca-e097-4678-9ae2-5b3775cf16e9", "address": "fa:16:3e:dd:80:22", "network": {"id": "dec9f876-3382-4488-90e2-702f201ed688", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-107507873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d100ba970de24698aff03c4c537b3c18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb9abca-e0", "ovs_interfaceid": "afb9abca-e097-4678-9ae2-5b3775cf16e9", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 623.023819] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:80:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cde23701-02ca-4cb4-b5a6-d321f8ac9660', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'afb9abca-e097-4678-9ae2-5b3775cf16e9', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 623.031328] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Creating folder: Project (d100ba970de24698aff03c4c537b3c18). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 623.032658] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cce3797-0f00-4492-8eeb-741de590334a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.035936] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c74e488a-e3e0-4eab-bc5a-f507172ee4a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.044459] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392766a8-4d8d-43bf-870e-6609fd41186d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.049632] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Created folder: Project (d100ba970de24698aff03c4c537b3c18) in parent group-v909763. [ 623.049815] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Creating folder: Instances. Parent ref: group-v909800. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 623.050518] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27463a62-42d3-48fa-a263-1b2121fc41c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.061315] env[65758]: DEBUG nova.compute.provider_tree [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.063956] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Created folder: Instances in parent group-v909800. [ 623.064202] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 623.064632] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 623.064834] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f772401-d734-450d-ba7e-a2fae9bfa959 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.085521] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 623.085521] env[65758]: value = "task-4659911" [ 623.085521] env[65758]: _type = "Task" [ 623.085521] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.094271] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659911, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.112773] env[65758]: DEBUG nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 623.129641] env[65758]: DEBUG nova.network.neutron [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Successfully created port: fc7dd128-390d-4176-b4ab-960fb037bc95 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 623.142252] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659907, 'name': CreateVM_Task, 'duration_secs': 0.435458} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.142252] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.142714] env[65758]: WARNING neutronclient.v2_0.client [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 623.142908] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.239164] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fe3cd8-594f-0f9b-c316-2f847d23bc38, 'name': SearchDatastore_Task, 'duration_secs': 0.017589} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.243246] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.243513] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.243762] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.243897] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.244085] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.244557] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.244868] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 623.245176] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4aa5945-cbd7-4932-9298-42a31d2449a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.247572] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80583761-4e13-4406-9f3e-cefc26de2e38 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.255956] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659908, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.263117] env[65758]: DEBUG oslo_vmware.api [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659904, 'name': PowerOnVM_Task, 'duration_secs': 0.565364} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.264315] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 623.264540] env[65758]: INFO nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Took 12.36 seconds to spawn the instance on the hypervisor. [ 623.264741] env[65758]: DEBUG nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 623.265188] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 623.265188] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527c2358-85d2-fcf4-1089-55256a0c34e6" [ 623.265188] env[65758]: _type = "Task" [ 623.265188] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.266883] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb2153c-cf6d-4d88-88be-b18e8ff46f0a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.270118] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.270308] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 623.274929] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e34c0699-0424-4ddf-b20c-c748bb1c17e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.290060] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527c2358-85d2-fcf4-1089-55256a0c34e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.290958] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 623.290958] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522ca96f-0e89-bba4-1bc0-dc85ac46d75d" [ 623.290958] env[65758]: _type = "Task" [ 623.290958] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.302190] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522ca96f-0e89-bba4-1bc0-dc85ac46d75d, 'name': SearchDatastore_Task, 'duration_secs': 0.013296} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.302488] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3061c984-ac05-47af-809a-660dc5f2bbf1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.311034] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 623.311034] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5216d565-e8b2-3e8e-59b1-2418c16fcc9b" [ 623.311034] env[65758]: _type = "Task" [ 623.311034] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.320738] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5216d565-e8b2-3e8e-59b1-2418c16fcc9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.325323] env[65758]: DEBUG nova.network.neutron [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Successfully updated port: 09d73b49-88a0-426f-915b-c6c03998738f {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 623.417338] env[65758]: DEBUG nova.compute.manager [req-97c0251d-6e47-49d4-92ef-488a3e8af80a req-8de7fe0a-fa00-42a8-94bd-d362d2a7cadc service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Received event network-vif-plugged-b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 623.417746] env[65758]: DEBUG oslo_concurrency.lockutils [req-97c0251d-6e47-49d4-92ef-488a3e8af80a req-8de7fe0a-fa00-42a8-94bd-d362d2a7cadc service nova] Acquiring lock "37aadd44-79e8-4479-862f-265549c9d802-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.418037] env[65758]: DEBUG oslo_concurrency.lockutils [req-97c0251d-6e47-49d4-92ef-488a3e8af80a req-8de7fe0a-fa00-42a8-94bd-d362d2a7cadc service nova] Lock "37aadd44-79e8-4479-862f-265549c9d802-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.418297] env[65758]: DEBUG oslo_concurrency.lockutils [req-97c0251d-6e47-49d4-92ef-488a3e8af80a req-8de7fe0a-fa00-42a8-94bd-d362d2a7cadc service nova] Lock "37aadd44-79e8-4479-862f-265549c9d802-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.418427] env[65758]: DEBUG nova.compute.manager [req-97c0251d-6e47-49d4-92ef-488a3e8af80a req-8de7fe0a-fa00-42a8-94bd-d362d2a7cadc service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] No waiting events found dispatching network-vif-plugged-b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 623.418586] env[65758]: WARNING nova.compute.manager [req-97c0251d-6e47-49d4-92ef-488a3e8af80a req-8de7fe0a-fa00-42a8-94bd-d362d2a7cadc service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Received unexpected event network-vif-plugged-b574c870-790b-4dad-8dce-58d93bb6fe44 for instance with vm_state building and task_state spawning. 
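The repeated "Waiting for the task: (returnval){ ... } to complete" / "progress is N%" pairs above come from oslo.vmware's task polling, which drives each vCenter task seen here (CreateVM_Task, PowerOnVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task) through a fixed-interval loop until the task reports success or error. Below is a minimal illustrative sketch of that polling pattern, not the actual oslo.vmware implementation; it uses oslo.service's FixedIntervalLoopingCall (the loopingcall module referenced in the log) and a hypothetical get_task_info() helper standing in for the real property-collector call.

    # Illustrative sketch only -- not the oslo.vmware code itself.
    # get_task_info(task_ref) is a hypothetical helper that would wrap the
    # PropertyCollector.RetrievePropertiesEx calls visible in the log.
    from oslo_service import loopingcall

    def wait_for_vcenter_task(task_ref, get_task_info, interval=0.5):
        """Poll a vCenter task until it succeeds or fails, mirroring the
        'Waiting for the task ... / progress is N%' log entries."""

        def _poll():
            info = get_task_info(task_ref)  # hypothetical helper
            if info.state == 'success':
                # Stop the loop and hand the task result back to the caller.
                raise loopingcall.LoopingCallDone(info.result)
            if info.state == 'error':
                raise RuntimeError('Task %s failed: %s' % (task_ref, info.error))
            # 'queued' / 'running': report progress and poll again.
            print('Task %s progress is %s%%' % (task_ref, info.progress or 0))

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()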
[ 623.567720] env[65758]: DEBUG nova.scheduler.client.report [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 623.600976] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659911, 'name': CreateVM_Task, 'duration_secs': 0.459275} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.601171] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.601689] env[65758]: WARNING neutronclient.v2_0.client [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 623.602025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.751804] env[65758]: DEBUG oslo_vmware.api [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4659908, 'name': PowerOnVM_Task, 'duration_secs': 0.551583} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.752118] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 623.752334] env[65758]: INFO nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Took 10.88 seconds to spawn the instance on the hypervisor. 
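The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries around "[datastore2] devstack-image-cache_base/75a6399b-..." show several concurrent builds of the same Glance image being serialized on the datastore image cache before each request decides whether the cached VMDK can be reused. A minimal sketch of that serialization with oslo.concurrency is shown below; the lock name mirrors the log, while cache_exists() and fetch_image_to_cache() are hypothetical stand-ins for the real cache-check and copy steps, so this is an assumption-laden illustration rather than Nova's actual _fetch_image_if_missing logic.

    # Illustrative sketch only -- helpers are hypothetical stand-ins.
    from oslo_concurrency import lockutils

    def ensure_image_cached(datastore, image_id, cache_exists, fetch_image_to_cache):
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        # Same-named lock as in the log: only one request per image populates
        # the cache; the others block here and then reuse the cached VMDK.
        with lockutils.lock(lock_name):
            if not cache_exists(image_id):          # hypothetical helper
                fetch_image_to_cache(image_id)      # hypothetical helper
        return '%s/%s.vmdk' % (image_id, image_id)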
[ 623.752504] env[65758]: DEBUG nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 623.753346] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c202335-8a5b-4a91-a841-2b21fe2d1726 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.780987] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527c2358-85d2-fcf4-1089-55256a0c34e6, 'name': SearchDatastore_Task, 'duration_secs': 0.024585} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.781334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.781596] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.781848] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.782115] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.782427] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 623.782692] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ebe2243-0b55-44d6-b58c-8e696c73fc19 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.788658] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 
tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 623.788658] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5257f692-a6c9-bf09-f682-ff92eee2ae72" [ 623.788658] env[65758]: _type = "Task" [ 623.788658] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.800467] env[65758]: INFO nova.compute.manager [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Took 26.62 seconds to build instance. [ 623.803785] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5257f692-a6c9-bf09-f682-ff92eee2ae72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.821109] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5216d565-e8b2-3e8e-59b1-2418c16fcc9b, 'name': SearchDatastore_Task, 'duration_secs': 0.013146} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.821978] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.822289] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e48a075b-41b3-4612-bd5f-0a158d707a2f/e48a075b-41b3-4612-bd5f-0a158d707a2f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 623.822574] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.822803] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.823050] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9c6a650-7576-4949-9883-5352cd2a1524 
{{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.825525] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64dd5828-a9a9-49fc-861d-565298003b16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.827946] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "refresh_cache-3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.828122] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "refresh_cache-3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.828286] env[65758]: DEBUG nova.network.neutron [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 623.837089] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 623.837089] env[65758]: value = "task-4659912" [ 623.837089] env[65758]: _type = "Task" [ 623.837089] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.839155] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.839356] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 623.843474] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08769355-e37a-457b-aeee-f7f09b2bc9a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.850575] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 623.850575] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524d23ac-4843-90f1-5ebe-512168d56574" [ 623.850575] env[65758]: _type = "Task" [ 623.850575] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.860661] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659912, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.867130] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524d23ac-4843-90f1-5ebe-512168d56574, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.074583] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.975s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.075139] env[65758]: DEBUG nova.compute.manager [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 624.078898] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.158s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.079136] env[65758]: DEBUG nova.objects.instance [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Lazy-loading 'resources' on Instance uuid 67fdb417-62ea-412c-8b82-868d59149f89 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 624.125043] env[65758]: DEBUG nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 624.145949] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 624.146262] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.146419] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 624.146563] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.146764] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 624.148486] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 624.148486] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 624.148486] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 624.148486] 
env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 624.148486] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 624.148674] env[65758]: DEBUG nova.virt.hardware [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 624.149043] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825725f1-fdab-489c-8973-dbf2f9022e80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.158434] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63f5950-3ac7-44dc-a7d2-df46143c9ac9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.277304] env[65758]: INFO nova.compute.manager [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Took 26.88 seconds to build instance. [ 624.309546] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7245925a-44de-457f-8827-fb3568676961 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.168s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.309846] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5257f692-a6c9-bf09-f682-ff92eee2ae72, 'name': SearchDatastore_Task, 'duration_secs': 0.027207} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.311334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.311597] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 624.311861] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.331551] env[65758]: WARNING openstack [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 624.331793] env[65758]: WARNING openstack [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 624.349299] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659912, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.366828] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524d23ac-4843-90f1-5ebe-512168d56574, 'name': SearchDatastore_Task, 'duration_secs': 0.014808} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.369303] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b609be01-5235-4bac-a73c-2da51c84aaa1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.374184] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 624.374184] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52816bb4-03aa-c769-d0e7-30787f5b689a" [ 624.374184] env[65758]: _type = "Task" [ 624.374184] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.385120] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52816bb4-03aa-c769-d0e7-30787f5b689a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.527979] env[65758]: DEBUG nova.network.neutron [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 624.586566] env[65758]: DEBUG nova.compute.utils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 624.592867] env[65758]: DEBUG nova.compute.manager [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Not allocating networking since 'none' was specified. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 624.644988] env[65758]: DEBUG nova.compute.manager [None req-4849365f-d4b6-473e-8716-fb004a2a0dde tempest-ServerDiagnosticsV248Test-1955092603 tempest-ServerDiagnosticsV248Test-1955092603-project-admin] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 624.647733] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2103ae57-5a68-4f87-af20-3f1d6429f800 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.663921] env[65758]: INFO nova.compute.manager [None req-4849365f-d4b6-473e-8716-fb004a2a0dde tempest-ServerDiagnosticsV248Test-1955092603 tempest-ServerDiagnosticsV248Test-1955092603-project-admin] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Retrieving diagnostics [ 624.666179] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf9d0f4-6019-489b-9db6-20c4d7b916cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.757452] env[65758]: WARNING neutronclient.v2_0.client [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 624.758298] env[65758]: WARNING openstack [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 624.758733] env[65758]: WARNING openstack [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 624.780087] env[65758]: DEBUG oslo_concurrency.lockutils [None req-04e5dfdc-45f1-4702-bcd9-fb8d015e079b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "37aadd44-79e8-4479-862f-265549c9d802" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.399s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.813806] env[65758]: DEBUG nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 624.864475] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659912, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.985661} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.864777] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e48a075b-41b3-4612-bd5f-0a158d707a2f/e48a075b-41b3-4612-bd5f-0a158d707a2f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 624.864991] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.868056] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db2ff7a5-eb21-4ad4-9e8b-904eb408efb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.877054] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 624.877054] env[65758]: value = "task-4659913" [ 624.877054] env[65758]: _type = "Task" [ 624.877054] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.904583] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52816bb4-03aa-c769-d0e7-30787f5b689a, 'name': SearchDatastore_Task, 'duration_secs': 0.032382} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.904821] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659913, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.905828] env[65758]: DEBUG nova.network.neutron [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Successfully updated port: fc7dd128-390d-4176-b4ab-960fb037bc95 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 624.907016] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.907269] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 624.907577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.907719] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 624.907927] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4064575b-5546-41a3-bb15-bc4d56f6df8d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.918028] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c98b4762-7229-420a-9f6f-6b79eec39863 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.928187] env[65758]: DEBUG nova.network.neutron [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Updating instance_info_cache with network_info: [{"id": "09d73b49-88a0-426f-915b-c6c03998738f", "address": "fa:16:3e:17:ed:60", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09d73b49-88", "ovs_interfaceid": "09d73b49-88a0-426f-915b-c6c03998738f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 624.932120] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 624.932120] env[65758]: value = "task-4659914" [ 624.932120] env[65758]: _type = "Task" [ 624.932120] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.933028] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 624.933028] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 624.933453] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97b52ac0-9248-42f1-9084-33f286c05fac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.946506] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 624.946506] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526b32ae-71f5-12a6-0dcc-96dd11d692a1" [ 624.946506] env[65758]: _type = "Task" [ 624.946506] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.950220] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659914, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.963324] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526b32ae-71f5-12a6-0dcc-96dd11d692a1, 'name': SearchDatastore_Task, 'duration_secs': 0.014133} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.964706] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f881b329-9380-410f-a4aa-853ebfddf56d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.973175] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 624.973175] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52137552-befb-e715-ffef-adbf693a4cbc" [ 624.973175] env[65758]: _type = "Task" [ 624.973175] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.985328] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52137552-befb-e715-ffef-adbf693a4cbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.010701] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a28d25-434c-4f38-8f19-b67b2d1419b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.019927] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4c8071-8812-477c-b1be-5b7e4578615d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.051052] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df199aac-2eaf-46df-a922-36cacb3081f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.059532] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aec93c2-5522-45a0-bce7-8d2308c6218b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.075862] env[65758]: DEBUG nova.compute.provider_tree [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.081038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] 
Acquiring lock "83b637d8-b9fa-4159-b879-c1d737871539" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.081273] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "83b637d8-b9fa-4159-b879-c1d737871539" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.092568] env[65758]: DEBUG nova.compute.manager [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 625.340733] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.393077] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659913, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07671} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.393906] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 625.394675] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4e57f9-c4a2-4f95-ac72-6e656b756cc3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.413270] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-0ac196fa-d88c-45a8-999e-8b5216912041" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.413413] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-0ac196fa-d88c-45a8-999e-8b5216912041" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.413532] env[65758]: DEBUG nova.network.neutron [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 625.423992] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] e48a075b-41b3-4612-bd5f-0a158d707a2f/e48a075b-41b3-4612-bd5f-0a158d707a2f.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 625.424695] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1997a2fc-a146-4032-a366-6d27a2e1f8a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.441173] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "refresh_cache-3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.441576] env[65758]: DEBUG nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Instance network_info: |[{"id": "09d73b49-88a0-426f-915b-c6c03998738f", "address": "fa:16:3e:17:ed:60", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09d73b49-88", "ovs_interfaceid": "09d73b49-88a0-426f-915b-c6c03998738f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 625.442551] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:ed:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09d73b49-88a0-426f-915b-c6c03998738f', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.451236] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 625.456499] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 625.456686] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7cae03d-99e5-4ed2-85c2-fd50ad900006 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.476191] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 625.476191] env[65758]: value = "task-4659915" [ 625.476191] env[65758]: _type = "Task" [ 625.476191] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.485724] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659914, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.491494] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.491494] env[65758]: value = "task-4659916" [ 625.491494] env[65758]: _type = "Task" [ 625.491494] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.506226] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659915, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.507761] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52137552-befb-e715-ffef-adbf693a4cbc, 'name': SearchDatastore_Task, 'duration_secs': 0.021592} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.507975] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.509580] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e60efbcd-1c4e-40a1-8bc1-893daa511073/e60efbcd-1c4e-40a1-8bc1-893daa511073.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 625.509580] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa92667b-15d9-4479-a71c-d21ead6cb163 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.515914] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659916, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.522675] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 625.522675] env[65758]: value = "task-4659917" [ 625.522675] env[65758]: _type = "Task" [ 625.522675] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.536989] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659917, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.582985] env[65758]: DEBUG nova.scheduler.client.report [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 625.587774] env[65758]: DEBUG nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 625.927253] env[65758]: WARNING openstack [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 625.928869] env[65758]: WARNING openstack [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 625.956343] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659914, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.799124} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.957102] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 625.957365] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 625.957808] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-204e5197-ccfb-4749-b039-7d9eca53ded5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.968417] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 625.968417] env[65758]: value = "task-4659918" [ 625.968417] env[65758]: _type = "Task" [ 625.968417] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.980904] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659918, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.013034] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659915, 'name': ReconfigVM_Task, 'duration_secs': 0.468062} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.016870] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Reconfigured VM instance instance-0000000b to attach disk [datastore2] e48a075b-41b3-4612-bd5f-0a158d707a2f/e48a075b-41b3-4612-bd5f-0a158d707a2f.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 626.016957] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659916, 'name': CreateVM_Task, 'duration_secs': 0.446644} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.017151] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62a81073-bf67-4cd1-8b17-06d77d6acb97 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.019251] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 626.019801] env[65758]: WARNING neutronclient.v2_0.client [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 626.020287] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.020555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.020954] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 626.021752] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6acb0f1-69f2-485d-83b6-89822273824e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.031052] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 626.031052] env[65758]: value = "task-4659919" [ 626.031052] env[65758]: _type = "Task" [ 626.031052] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.033294] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 626.033294] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ec3abd-2685-1fc3-d10e-a7b5e73e7d69" [ 626.033294] env[65758]: _type = "Task" [ 626.033294] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.045781] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659917, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.058081] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ec3abd-2685-1fc3-d10e-a7b5e73e7d69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.058081] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659919, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.070036] env[65758]: DEBUG nova.network.neutron [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 626.091321] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "483765b5-c63c-4aac-9082-519bbc4e6eb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.093080] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.093080] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.014s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.096185] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.707s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.096572] env[65758]: DEBUG nova.objects.instance [None req-31e3c16f-6e58-44eb-8866-1b828285d4be 
tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lazy-loading 'resources' on Instance uuid e4540963-7be9-426e-90f8-b31524d2237b {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 626.113870] env[65758]: DEBUG nova.compute.manager [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 626.133984] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.145341] env[65758]: INFO nova.scheduler.client.report [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Deleted allocations for instance 67fdb417-62ea-412c-8b82-868d59149f89 [ 626.149936] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 626.153700] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 626.153700] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 626.153700] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 626.153700] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Image pref 0:0:0 {{(pid=65758) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 626.153700] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 626.153700] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 626.153958] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 626.153958] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 626.153958] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 626.153958] env[65758]: DEBUG nova.virt.hardware [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 626.153958] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b4971c-c8d4-4b5a-a531-37c57da03c91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.168107] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258e0d10-8491-4bf2-abb8-5431f2a53fb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.184704] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.190986] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Creating folder: Project (782a347799884da295a6369f61170122). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.192795] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c9748be-1a37-451d-9752-58d8d29ef346 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.206228] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Created folder: Project (782a347799884da295a6369f61170122) in parent group-v909763. [ 626.206353] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Creating folder: Instances. Parent ref: group-v909804. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.206653] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc9a252d-2db7-4b95-95cb-78935d045300 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.221771] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Created folder: Instances in parent group-v909804. [ 626.221771] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 626.221771] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 626.221771] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9db302a5-1150-4947-a57b-820e5664ecef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.243725] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.243725] env[65758]: value = "task-4659922" [ 626.243725] env[65758]: _type = "Task" [ 626.243725] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.256970] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659922, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.366894] env[65758]: WARNING neutronclient.v2_0.client [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 626.367594] env[65758]: WARNING openstack [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 626.367935] env[65758]: WARNING openstack [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 626.480594] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659918, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103906} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.482903] env[65758]: DEBUG nova.network.neutron [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Updating instance_info_cache with network_info: [{"id": "fc7dd128-390d-4176-b4ab-960fb037bc95", "address": "fa:16:3e:87:66:b3", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc7dd128-39", "ovs_interfaceid": "fc7dd128-390d-4176-b4ab-960fb037bc95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 626.484113] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 626.485716] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5ef30c-36f5-48d3-98ab-e2996605138b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.510013] env[65758]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 626.511157] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71c4a517-f789-4db4-aae0-b8204c59e954 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.539160] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659917, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713388} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.551918] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e60efbcd-1c4e-40a1-8bc1-893daa511073/e60efbcd-1c4e-40a1-8bc1-893daa511073.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 626.552480] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 626.552693] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 626.552693] env[65758]: value = "task-4659923" [ 626.552693] env[65758]: _type = "Task" [ 626.552693] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.553422] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-69ec5d84-ce43-4c0a-a3b5-aa4fc3f2650f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.568648] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659919, 'name': Rename_Task, 'duration_secs': 0.256589} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.568999] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ec3abd-2685-1fc3-d10e-a7b5e73e7d69, 'name': SearchDatastore_Task, 'duration_secs': 0.059164} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.569766] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.570150] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.570466] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.570780] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.570936] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.571132] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 626.571809] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01fb1a87-61ac-4e93-ac95-80e4fcb77f1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.574049] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-febed8fe-6cd0-46c0-8bea-850cd3395057 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.581638] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 
tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 626.581638] env[65758]: value = "task-4659924" [ 626.581638] env[65758]: _type = "Task" [ 626.581638] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.582017] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659923, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.594814] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 626.594814] env[65758]: value = "task-4659925" [ 626.594814] env[65758]: _type = "Task" [ 626.594814] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.601693] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.601693] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.602193] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659924, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.606899] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d88bad84-0bcf-4860-8f81-9170f6d27fd9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.622456] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659925, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.624552] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 626.624552] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52dfb901-d9a6-ac38-f6d3-0a2d3672afae" [ 626.624552] env[65758]: _type = "Task" [ 626.624552] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.643424] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dfb901-d9a6-ac38-f6d3-0a2d3672afae, 'name': SearchDatastore_Task, 'duration_secs': 0.015155} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.643424] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c0ed99a-aa60-4ba3-a02b-384d312ec321 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.652985] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 626.652985] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525e2393-13d4-b2ea-f4f2-0e308da25c1c" [ 626.652985] env[65758]: _type = "Task" [ 626.652985] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.667043] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525e2393-13d4-b2ea-f4f2-0e308da25c1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.667043] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc1a0ea5-e399-4cdb-bdd3-70e07e2fb079 tempest-DeleteServersAdminTestJSON-1749091504 tempest-DeleteServersAdminTestJSON-1749091504-project-admin] Lock "67fdb417-62ea-412c-8b82-868d59149f89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.535s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.757886] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659922, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.943321] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a935c6-a8b7-4f94-b22a-4b01ebb4705f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.957025] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b465f3-951a-407b-8781-2ae6793ac05c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.989473] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-0ac196fa-d88c-45a8-999e-8b5216912041" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.989603] env[65758]: DEBUG nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Instance network_info: |[{"id": "fc7dd128-390d-4176-b4ab-960fb037bc95", "address": "fa:16:3e:87:66:b3", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc7dd128-39", "ovs_interfaceid": "fc7dd128-390d-4176-b4ab-960fb037bc95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 626.990540] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:66:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc7dd128-390d-4176-b4ab-960fb037bc95', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.999397] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating folder: Project (fd550f85853f447bb91a89b6bc6c5720). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.000220] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ed96dc-5565-4141-b006-26c57810b82b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.003273] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f0bdf40-7884-4cb0-b82c-c1cb7f6b4e4e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.012047] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7111d8f2-8807-4c97-b161-e91463abfa8f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.019065] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created folder: Project (fd550f85853f447bb91a89b6bc6c5720) in parent group-v909763. [ 627.019065] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating folder: Instances. Parent ref: group-v909807. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.019284] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4dce748b-80b2-408a-b3db-a7801a0f2e0e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.029658] env[65758]: DEBUG nova.compute.provider_tree [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.046758] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created folder: Instances in parent group-v909807. [ 627.047218] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 627.047459] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 627.047682] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8abe6b34-d657-44fd-a4d3-cba907d41cee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.076775] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659923, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.078412] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 627.078412] env[65758]: value = "task-4659928" [ 627.078412] env[65758]: _type = "Task" [ 627.078412] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.090660] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659928, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.097595] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092444} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.097927] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 627.098827] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90d51f9-1ffa-4612-a9ea-a3adba340f70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.111619] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659925, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.132273] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] e60efbcd-1c4e-40a1-8bc1-893daa511073/e60efbcd-1c4e-40a1-8bc1-893daa511073.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 627.132998] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2dde0d4-e487-4fe7-972d-ccb6a8e40e32 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.154734] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 627.154734] env[65758]: value = "task-4659929" [ 627.154734] env[65758]: _type = "Task" [ 627.154734] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.169144] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525e2393-13d4-b2ea-f4f2-0e308da25c1c, 'name': SearchDatastore_Task, 'duration_secs': 0.018711} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.172924] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.173281] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca/3049c522-d3bc-4ccf-93bd-0d1efe41d1ca.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.173851] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659929, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.174656] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3f195f5-c37d-4fda-9b71-046852642637 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.183341] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 627.183341] env[65758]: value = "task-4659930" [ 627.183341] env[65758]: _type = "Task" [ 627.183341] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.194873] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.256018] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659922, 'name': CreateVM_Task, 'duration_secs': 0.59864} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.256358] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.256790] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.256949] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.257298] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 627.257609] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da88068f-f8b8-4b7c-a826-ced97c7c22a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.263847] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 627.263847] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5214ce19-36a4-d235-7a02-ceabbe1b65cc" [ 627.263847] env[65758]: _type = "Task" [ 627.263847] env[65758]: } to 
complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.273249] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5214ce19-36a4-d235-7a02-ceabbe1b65cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.430521] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Received event network-changed-9a9b1289-899b-4fe7-b1a8-cc090598a824 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 627.430679] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Refreshing instance network info cache due to event network-changed-9a9b1289-899b-4fe7-b1a8-cc090598a824. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 627.431160] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "refresh_cache-64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.431160] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquired lock "refresh_cache-64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.431334] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Refreshing network info cache for port 9a9b1289-899b-4fe7-b1a8-cc090598a824 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 627.536027] env[65758]: DEBUG nova.scheduler.client.report [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 627.579208] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659923, 'name': ReconfigVM_Task, 'duration_secs': 0.955812} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.581202] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.591018] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e9eb3bd-2f8d-47e8-8cdd-9514b8f6d09e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.602926] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659928, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.609026] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 627.609026] env[65758]: value = "task-4659931" [ 627.609026] env[65758]: _type = "Task" [ 627.609026] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.625028] env[65758]: DEBUG oslo_vmware.api [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4659925, 'name': PowerOnVM_Task, 'duration_secs': 0.538895} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.626048] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.627084] env[65758]: INFO nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Took 12.33 seconds to spawn the instance on the hypervisor. [ 627.627084] env[65758]: DEBUG nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 627.633307] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50da24b1-11fc-4697-a9cb-46bcb9650cf1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.636614] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659931, 'name': Rename_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.675431] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659929, 'name': ReconfigVM_Task, 'duration_secs': 0.501072} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.675698] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Reconfigured VM instance instance-0000000d to attach disk [datastore2] e60efbcd-1c4e-40a1-8bc1-893daa511073/e60efbcd-1c4e-40a1-8bc1-893daa511073.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.676440] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33f86dbd-4bea-4d21-8351-f2b4311fd256 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.685735] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 627.685735] env[65758]: value = "task-4659932" [ 627.685735] env[65758]: _type = "Task" [ 627.685735] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.701165] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659932, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.707522] env[65758]: DEBUG nova.compute.manager [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Received event network-changed-b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 627.707686] env[65758]: DEBUG nova.compute.manager [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Refreshing instance network info cache due to event network-changed-b574c870-790b-4dad-8dce-58d93bb6fe44. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 627.708116] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Acquiring lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.708116] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Acquired lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.708323] env[65758]: DEBUG nova.network.neutron [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Refreshing network info cache for port b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 627.710860] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659930, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.777671] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5214ce19-36a4-d235-7a02-ceabbe1b65cc, 'name': SearchDatastore_Task, 'duration_secs': 0.050709} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.777671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.777671] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.777959] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.777997] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.778185] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.778498] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1517223-765b-4a1b-b75d-00e23efeb2ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.794073] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.794300] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.795109] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48912c2e-689b-4fc2-9f1c-7233d565e9fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.802485] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 627.802485] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b25aeb-f555-9e96-fa22-28e69f16a521" [ 627.802485] env[65758]: _type = "Task" [ 627.802485] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.819268] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b25aeb-f555-9e96-fa22-28e69f16a521, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.938023] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 627.938023] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 627.938023] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 628.013171] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquiring lock "a2010738-759b-480a-8360-2639788056b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.013171] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "a2010738-759b-480a-8360-2639788056b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.041371] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be 
tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.946s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.048078] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.958s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.051995] env[65758]: INFO nova.compute.claims [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.093834] env[65758]: INFO nova.scheduler.client.report [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Deleted allocations for instance e4540963-7be9-426e-90f8-b31524d2237b [ 628.109038] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659928, 'name': CreateVM_Task, 'duration_secs': 0.630134} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.109343] env[65758]: DEBUG nova.compute.manager [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 628.112073] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 628.113455] env[65758]: WARNING neutronclient.v2_0.client [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 628.113455] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.113455] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.113705] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 628.117412] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-720046a1-a0eb-433e-b785-37f72bb3cfd2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.131045] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquiring lock "9e16d31b-e84c-448b-9d83-98cac49570a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.131284] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "9e16d31b-e84c-448b-9d83-98cac49570a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.131474] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquiring lock "9e16d31b-e84c-448b-9d83-98cac49570a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.132620] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "9e16d31b-e84c-448b-9d83-98cac49570a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.132620] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 
tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "9e16d31b-e84c-448b-9d83-98cac49570a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.134181] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 628.134181] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52387d3a-aa25-3421-c991-5fd84515d10e" [ 628.134181] env[65758]: _type = "Task" [ 628.134181] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.134511] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659931, 'name': Rename_Task, 'duration_secs': 0.218216} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.134993] env[65758]: INFO nova.compute.manager [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Terminating instance [ 628.137611] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 628.141241] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-907109e1-7023-450b-b578-5d54590e1db6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.159787] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52387d3a-aa25-3421-c991-5fd84515d10e, 'name': SearchDatastore_Task, 'duration_secs': 0.01889} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.163130] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.163380] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 628.163578] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.164165] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 628.164165] env[65758]: value = "task-4659933" [ 628.164165] env[65758]: _type = "Task" [ 628.164165] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.164567] env[65758]: INFO nova.compute.manager [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Took 25.81 seconds to build instance. [ 628.177708] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659933, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.209925] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.860646} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.210061] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659932, 'name': Rename_Task, 'duration_secs': 0.206551} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.210746] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca/3049c522-d3bc-4ccf-93bd-0d1efe41d1ca.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 628.210746] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.210746] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 628.211261] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5d06bae-36ef-4dc9-8a6d-be0f8130639d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.213252] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-961fec49-ffb1-4a2b-887a-e0a34e18341e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.215570] env[65758]: WARNING neutronclient.v2_0.client [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 628.215824] env[65758]: WARNING openstack [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 628.216947] env[65758]: WARNING openstack [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 628.229737] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 628.229737] env[65758]: value = "task-4659935" [ 628.229737] env[65758]: _type = "Task" [ 628.229737] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.234056] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 628.234056] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 628.234056] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 628.245633] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 628.245633] env[65758]: value = "task-4659934" [ 628.245633] env[65758]: _type = "Task" [ 628.245633] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.264300] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659935, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.267885] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659934, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.317109] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b25aeb-f555-9e96-fa22-28e69f16a521, 'name': SearchDatastore_Task, 'duration_secs': 0.059534} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.318032] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb02c280-4730-4cee-b5bc-00636237eb48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.326966] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 628.326966] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bf0091-3a49-b04b-5ea6-ae4f3959f621" [ 628.326966] env[65758]: _type = "Task" [ 628.326966] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.337440] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bf0091-3a49-b04b-5ea6-ae4f3959f621, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.430489] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Updated VIF entry in instance network info cache for port 9a9b1289-899b-4fe7-b1a8-cc090598a824. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 628.432032] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Updating instance_info_cache with network_info: [{"id": "9a9b1289-899b-4fe7-b1a8-cc090598a824", "address": "fa:16:3e:b4:9a:30", "network": {"id": "b386f8af-e1bb-4d47-8e97-5bf03a5c9af1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1680045637-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc5591f5fd643b7a836022e19f60b52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9b1289-89", "ovs_interfaceid": "9a9b1289-899b-4fe7-b1a8-cc090598a824", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 628.610417] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31e3c16f-6e58-44eb-8866-1b828285d4be tempest-ServerDiagnosticsNegativeTest-983666065 tempest-ServerDiagnosticsNegativeTest-983666065-project-member] Lock "e4540963-7be9-426e-90f8-b31524d2237b" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.163s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.647505] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.648170] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquiring lock "refresh_cache-9e16d31b-e84c-448b-9d83-98cac49570a0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.648819] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquired lock "refresh_cache-9e16d31b-e84c-448b-9d83-98cac49570a0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.649134] env[65758]: DEBUG nova.network.neutron [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 628.671785] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7aa54b90-88bd-4d18-a53f-c4466aa6caef tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "e48a075b-41b3-4612-bd5f-0a158d707a2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.172s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.682886] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659933, 'name': PowerOnVM_Task} progress is 87%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.714230] env[65758]: WARNING neutronclient.v2_0.client [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 628.714570] env[65758]: WARNING openstack [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 628.715072] env[65758]: WARNING openstack [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 628.755951] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659935, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.765095] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659934, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075997} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.765390] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.766185] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741472e2-3fbb-45ea-a08b-7e40bf30e5cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.790521] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca/3049c522-d3bc-4ccf-93bd-0d1efe41d1ca.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.790876] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42e82a5d-71de-413e-a801-a221ff8a1bec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.816460] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 628.816460] env[65758]: value = "task-4659936" [ 628.816460] env[65758]: _type = "Task" [ 628.816460] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.826214] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659936, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.839365] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bf0091-3a49-b04b-5ea6-ae4f3959f621, 'name': SearchDatastore_Task, 'duration_secs': 0.012236} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.839637] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.840150] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.840150] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.840400] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 628.840550] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6f20e83-a3b3-4841-b00b-317fc03f3b0b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.842710] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-baca35d7-4e09-4ab4-9025-dabad482749a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.851864] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 628.851864] env[65758]: value = "task-4659937" [ 628.851864] env[65758]: _type = "Task" [ 628.851864] env[65758]: } 
to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.856518] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 628.856518] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 628.863024] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f1571ef-3843-4dee-9399-7ec273f41353 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.868059] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.872238] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 628.872238] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5211de4b-ab41-b48a-3804-32cbd579f316" [ 628.872238] env[65758]: _type = "Task" [ 628.872238] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.882286] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5211de4b-ab41-b48a-3804-32cbd579f316, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.901074] env[65758]: DEBUG nova.network.neutron [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Updated VIF entry in instance network info cache for port b574c870-790b-4dad-8dce-58d93bb6fe44. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 628.901470] env[65758]: DEBUG nova.network.neutron [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Updating instance_info_cache with network_info: [{"id": "b574c870-790b-4dad-8dce-58d93bb6fe44", "address": "fa:16:3e:62:75:62", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb574c870-79", "ovs_interfaceid": "b574c870-790b-4dad-8dce-58d93bb6fe44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 628.934831] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Releasing lock "refresh_cache-64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.935136] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Received event network-vif-deleted-83c16429-d108-4a97-84ec-81e4398f9881 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 628.935326] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Received event network-vif-deleted-af073ff0-f4c1-43b4-bf05-beb5e71db8ac {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 628.935496] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Received event network-vif-plugged-30a1632d-59ad-4b45-bb29-73404b1abc7c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 628.935676] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "e48a075b-41b3-4612-bd5f-0a158d707a2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.935901] env[65758]: DEBUG oslo_concurrency.lockutils 
[req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Lock "e48a075b-41b3-4612-bd5f-0a158d707a2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.936065] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Lock "e48a075b-41b3-4612-bd5f-0a158d707a2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.936223] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] No waiting events found dispatching network-vif-plugged-30a1632d-59ad-4b45-bb29-73404b1abc7c {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 628.936376] env[65758]: WARNING nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Received unexpected event network-vif-plugged-30a1632d-59ad-4b45-bb29-73404b1abc7c for instance with vm_state building and task_state spawning. [ 628.936600] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Received event network-changed-30a1632d-59ad-4b45-bb29-73404b1abc7c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 628.936665] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Refreshing instance network info cache due to event network-changed-30a1632d-59ad-4b45-bb29-73404b1abc7c. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 628.936837] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "refresh_cache-e48a075b-41b3-4612-bd5f-0a158d707a2f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.936964] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquired lock "refresh_cache-e48a075b-41b3-4612-bd5f-0a158d707a2f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.937121] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Refreshing network info cache for port 30a1632d-59ad-4b45-bb29-73404b1abc7c {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 629.155625] env[65758]: WARNING neutronclient.v2_0.client [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 629.156343] env[65758]: WARNING openstack [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 629.156697] env[65758]: WARNING openstack [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 629.179291] env[65758]: DEBUG nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 629.183795] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659933, 'name': PowerOnVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.261646] env[65758]: DEBUG nova.network.neutron [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 629.268943] env[65758]: DEBUG oslo_vmware.api [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4659935, 'name': PowerOnVM_Task, 'duration_secs': 0.908488} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.269267] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 629.269590] env[65758]: INFO nova.compute.manager [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Took 9.57 seconds to spawn the instance on the hypervisor. [ 629.269590] env[65758]: DEBUG nova.compute.manager [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 629.270462] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bf9684-6f98-4b84-b5ed-6f009ab8cd25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.334543] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659936, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.360035] env[65758]: DEBUG nova.network.neutron [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 629.371707] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659937, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.390601] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5211de4b-ab41-b48a-3804-32cbd579f316, 'name': SearchDatastore_Task, 'duration_secs': 0.026528} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.392742] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ec49975-d832-4054-8232-6f1ad947606a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.400782] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 629.400782] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d20deb-4563-aa2f-b616-ae84f453fbcb" [ 629.400782] env[65758]: _type = "Task" [ 629.400782] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.404802] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Releasing lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.406084] env[65758]: DEBUG nova.compute.manager [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Received event network-vif-plugged-fc7dd128-390d-4176-b4ab-960fb037bc95 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 629.406084] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Acquiring lock "0ac196fa-d88c-45a8-999e-8b5216912041-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.406084] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Lock "0ac196fa-d88c-45a8-999e-8b5216912041-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.406084] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Lock "0ac196fa-d88c-45a8-999e-8b5216912041-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.406084] env[65758]: DEBUG nova.compute.manager [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] No waiting events found dispatching network-vif-plugged-fc7dd128-390d-4176-b4ab-960fb037bc95 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 629.406340] env[65758]: WARNING nova.compute.manager [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Received unexpected event network-vif-plugged-fc7dd128-390d-4176-b4ab-960fb037bc95 for instance with vm_state building and task_state spawning. 
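The pop_instance_event lock traffic, the "No waiting events found dispatching network-vif-plugged-..." line and the "Received unexpected event ..." warning above are the external-event handshake between Neutron and the compute manager: the spawn path registers the events it expects, and an incoming event either wakes a waiter or is logged as unexpected. The sketch below reduces that idea to a dictionary of threading.Event objects; it is not Nova's actual InstanceEvents implementation and the helper names are invented, though the instance and port IDs are the ones from the log.

import threading
from collections import defaultdict

# instance_uuid -> {event_name: threading.Event waiting for it}
_waiters = defaultdict(dict)
_waiters_lock = threading.Lock()

def prepare_for_event(instance_uuid, event_name):
    # Called by the code path that expects the event (e.g. before plugging a VIF).
    ev = threading.Event()
    with _waiters_lock:
        _waiters[instance_uuid][event_name] = ev
    return ev

def dispatch_event(instance_uuid, event_name):
    # Called when Neutron reports the event; pop a waiter if one is registered.
    with _waiters_lock:
        ev = _waiters[instance_uuid].pop(event_name, None)
    if ev is None:
        print('Received unexpected event %s for instance %s'
              % (event_name, instance_uuid))
    else:
        ev.set()

# Usage, with the instance and port from the log lines above:
waiter = prepare_for_event('0ac196fa-d88c-45a8-999e-8b5216912041',
                           'network-vif-plugged-fc7dd128-390d-4176-b4ab-960fb037bc95')
dispatch_event('0ac196fa-d88c-45a8-999e-8b5216912041',
               'network-vif-plugged-fc7dd128-390d-4176-b4ab-960fb037bc95')
waiter.wait(timeout=300)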
[ 629.406340] env[65758]: DEBUG nova.compute.manager [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Received event network-changed-fc7dd128-390d-4176-b4ab-960fb037bc95 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 629.406340] env[65758]: DEBUG nova.compute.manager [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Refreshing instance network info cache due to event network-changed-fc7dd128-390d-4176-b4ab-960fb037bc95. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 629.406455] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Acquiring lock "refresh_cache-0ac196fa-d88c-45a8-999e-8b5216912041" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.406582] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Acquired lock "refresh_cache-0ac196fa-d88c-45a8-999e-8b5216912041" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.406724] env[65758]: DEBUG nova.network.neutron [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Refreshing network info cache for port fc7dd128-390d-4176-b4ab-960fb037bc95 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 629.418750] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d20deb-4563-aa2f-b616-ae84f453fbcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.442892] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
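The recurring neutronclient warning above is advice rather than an error: the v2.0 Python bindings still work, but the same lookups are meant to move to the OpenStack SDK. A hedged before/after sketch of one such call follows; the 'devstack' cloud name assumes a matching entry in clouds.yaml, and the device_id is the instance UUID from the log.

import openstack

# OpenStack SDK form the warning recommends; 'devstack' must exist in clouds.yaml.
conn = openstack.connect(cloud='devstack')
for port in conn.network.ports(device_id='0ac196fa-d88c-45a8-999e-8b5216912041'):
    print(port.id, port.mac_address, port.status)

# Deprecated neutronclient form the warning refers to (shown for comparison):
# from neutronclient.v2_0 import client as neutron_client
# neutron = neutron_client.Client(session=keystone_session)
# neutron.list_ports(device_id='0ac196fa-d88c-45a8-999e-8b5216912041')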
[ 629.443599] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 629.443945] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 629.586928] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c1ce19-9c4e-4a2f-9801-41929f274228 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.605880] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e7fc8d-cf49-4d12-9941-2eb2e8b45a9d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.649301] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0375100a-99c7-4024-bc26-cb70a3841b24 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.662947] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a653637-4277-4711-9147-86aa20125b59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.683885] env[65758]: DEBUG oslo_vmware.api [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659933, 'name': PowerOnVM_Task, 'duration_secs': 1.090382} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.691370] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 629.691453] env[65758]: INFO nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Took 11.97 seconds to spawn the instance on the hypervisor. 
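The paired "Disabling service 'block-storage'" and "Disabling service 'key-manager'" warnings above are openstacksdk probing Nova's oslo.config object for keystoneauth adapter options (valid_interfaces among them) that nothing registered under the [cinder] and [barbican] groups, so oslo.config raises NoSuchOptError and the SDK skips those services for its own use. The sketch below reproduces the error and then clears it with a hand-registered option; the manual ListOpt registration is purely illustrative, since in a real deployment the option would come from the service's keystoneauth adapter option registration rather than from code like this.

from oslo_config import cfg

CONF = cfg.ConfigOpts()
CONF.register_group(cfg.OptGroup('cinder'))
CONF([])   # parse an empty command line, no config files

try:
    CONF.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(exc)   # "no such option valid_interfaces in group [cinder]"

# Illustrative registration only; registering the option is what makes the
# same lookup succeed instead of raising.
CONF.register_opt(cfg.ListOpt('valid_interfaces',
                              default=['internal', 'public']),
                  group='cinder')
print(CONF.cinder.valid_interfaces)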
[ 629.691981] env[65758]: DEBUG nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 629.698828] env[65758]: DEBUG nova.compute.provider_tree [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.706510] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0231ee-0cef-4165-a366-b147b558319b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.743980] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.766474] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 629.767272] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 629.768167] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 629.797340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquiring lock "0addcbb1-3561-4c93-b714-37e6b613b962" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.797533] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "0addcbb1-3561-4c93-b714-37e6b613b962" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.802115] env[65758]: INFO nova.compute.manager [None 
req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Took 25.66 seconds to build instance. [ 629.828825] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659936, 'name': ReconfigVM_Task, 'duration_secs': 0.8138} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.829292] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca/3049c522-d3bc-4ccf-93bd-0d1efe41d1ca.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.829723] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b59b059-40e9-4864-917b-308f7171141b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.842945] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 629.842945] env[65758]: value = "task-4659938" [ 629.842945] env[65758]: _type = "Task" [ 629.842945] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.853793] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659938, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.863520] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Releasing lock "refresh_cache-9e16d31b-e84c-448b-9d83-98cac49570a0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.863943] env[65758]: DEBUG nova.compute.manager [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 629.864161] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 629.865278] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.958879} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.866047] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0fc27c-2424-4505-ba22-70bee4e297aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.871699] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.872066] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.872881] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7516dcd6-c7b6-44ac-810e-d9732a392f77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.881431] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 629.883816] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Updated VIF entry in instance network info cache for port 30a1632d-59ad-4b45-bb29-73404b1abc7c. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 629.884240] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Updating instance_info_cache with network_info: [{"id": "30a1632d-59ad-4b45-bb29-73404b1abc7c", "address": "fa:16:3e:24:e3:7f", "network": {"id": "1b7f73a4-ad38-4ec6-8c7a-4bd567147409", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1265965383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5fc14c9e85d404a8a6db0167ac84491", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a1632d-59", "ovs_interfaceid": "30a1632d-59ad-4b45-bb29-73404b1abc7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 629.886851] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a89afcca-3e3f-4890-bf2d-79201fc7d40d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.888869] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 629.888869] env[65758]: value = "task-4659939" [ 629.888869] env[65758]: _type = "Task" [ 629.888869] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.896200] env[65758]: DEBUG oslo_vmware.api [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 629.896200] env[65758]: value = "task-4659940" [ 629.896200] env[65758]: _type = "Task" [ 629.896200] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.904690] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659939, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.915498] env[65758]: WARNING neutronclient.v2_0.client [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
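The instance_info_cache payloads above are the serialized network_info model: a list with one entry per VIF, each carrying the port ID, MAC address, binding details and the network's subnets with their fixed and floating IPs. The self-contained snippet below pulls the usual fields out of one such entry; the dict literal is abridged from the cache entry for port 30a1632d-59ad-4b45-bb29-73404b1abc7c logged above.

# Abridged from the logged cache entry; only the fields used below are kept.
vif = {
    "id": "30a1632d-59ad-4b45-bb29-73404b1abc7c",
    "address": "fa:16:3e:24:e3:7f",
    "devname": "tap30a1632d-59",
    "type": "ovs",
    "details": {"segmentation_id": 948,
                "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1"},
    "network": {
        "label": "tempest-ServerTagsTestJSON-1265965383-network",
        "meta": {"mtu": 8950},
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.13", "type": "fixed",
                     "floating_ips": []}],
        }],
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
floating_ips = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]

print(vif["id"], vif["address"], vif["devname"])
print("fixed:", fixed_ips, "floating:", floating_ips,
      "mtu:", vif["network"]["meta"]["mtu"],
      "segment:", vif["details"]["segmentation_id"])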
[ 629.918053] env[65758]: WARNING openstack [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 629.918053] env[65758]: WARNING openstack [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 629.928398] env[65758]: DEBUG oslo_vmware.api [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659940, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.936324] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d20deb-4563-aa2f-b616-ae84f453fbcb, 'name': SearchDatastore_Task, 'duration_secs': 0.097069} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.936602] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.936851] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041/0ac196fa-d88c-45a8-999e-8b5216912041.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 629.937526] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62ee58c8-0d89-43c5-b236-aa29077c0154 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.947527] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 629.947527] env[65758]: value = "task-4659941" [ 629.947527] env[65758]: _type = "Task" [ 629.947527] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.957946] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.213204] env[65758]: DEBUG nova.scheduler.client.report [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 630.232036] env[65758]: INFO nova.compute.manager [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Took 26.72 seconds to build instance. [ 630.242937] env[65758]: WARNING neutronclient.v2_0.client [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 630.244134] env[65758]: WARNING openstack [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 630.244134] env[65758]: WARNING openstack [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 630.306289] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10a07b33-02db-4b9a-8040-0fe73425f285 tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.830s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.357535] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659938, 'name': Rename_Task, 'duration_secs': 0.254159} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.358390] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 630.358828] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d16c58e-be5e-406a-ab83-e61a6ba0649c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.371356] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 630.371356] env[65758]: value = "task-4659942" [ 630.371356] env[65758]: _type = "Task" [ 630.371356] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.379796] env[65758]: DEBUG nova.network.neutron [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Updated VIF entry in instance network info cache for port fc7dd128-390d-4176-b4ab-960fb037bc95. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 630.380445] env[65758]: DEBUG nova.network.neutron [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Updating instance_info_cache with network_info: [{"id": "fc7dd128-390d-4176-b4ab-960fb037bc95", "address": "fa:16:3e:87:66:b3", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc7dd128-39", "ovs_interfaceid": "fc7dd128-390d-4176-b4ab-960fb037bc95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 630.393790] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Releasing lock "refresh_cache-e48a075b-41b3-4612-bd5f-0a158d707a2f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.394261] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 
2d787237-26e5-4519-9f6e-1d30b9d016cf] Received event network-vif-plugged-df4cf195-46a9-4de5-ae34-2363de4377f0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 630.394560] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "2d787237-26e5-4519-9f6e-1d30b9d016cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.394954] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Lock "2d787237-26e5-4519-9f6e-1d30b9d016cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.395290] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Lock "2d787237-26e5-4519-9f6e-1d30b9d016cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.395560] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] No waiting events found dispatching network-vif-plugged-df4cf195-46a9-4de5-ae34-2363de4377f0 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 630.395890] env[65758]: WARNING nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Received unexpected event network-vif-plugged-df4cf195-46a9-4de5-ae34-2363de4377f0 for instance with vm_state building and task_state spawning. [ 630.396258] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Received event network-changed-df4cf195-46a9-4de5-ae34-2363de4377f0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 630.396550] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Refreshing instance network info cache due to event network-changed-df4cf195-46a9-4de5-ae34-2363de4377f0. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 630.397061] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "refresh_cache-2d787237-26e5-4519-9f6e-1d30b9d016cf" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.397316] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquired lock "refresh_cache-2d787237-26e5-4519-9f6e-1d30b9d016cf" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.397578] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Refreshing network info cache for port df4cf195-46a9-4de5-ae34-2363de4377f0 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 630.399870] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659942, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.421218] env[65758]: DEBUG oslo_vmware.api [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659940, 'name': PowerOffVM_Task, 'duration_secs': 0.265947} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.425185] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 630.425448] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 630.430029] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119342} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.430029] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33fffc4b-f331-4296-89fc-058eb341e982 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.430029] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.430029] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5085ef7a-9ac8-4bff-b28f-60b37ee4c07f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.455073] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.455741] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca0bac38-7318-4344-9174-e86140e1af47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.478106] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 630.480155] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 630.480155] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Deleting the datastore file [datastore2] 9e16d31b-e84c-448b-9d83-98cac49570a0 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 630.480155] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca142630-cffd-4e16-8cd2-4a69f3169c73 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.486835] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 630.486835] env[65758]: value = "task-4659944" [ 630.486835] env[65758]: _type = "Task" [ 630.486835] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.495024] env[65758]: DEBUG oslo_vmware.api [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for the task: (returnval){ [ 630.495024] env[65758]: value = "task-4659945" [ 630.495024] env[65758]: _type = "Task" [ 630.495024] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.495024] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659941, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.510281] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.514729] env[65758]: DEBUG oslo_vmware.api [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659945, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.719872] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.674s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.720903] env[65758]: DEBUG nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 630.724080] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.309s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.725071] env[65758]: DEBUG nova.objects.instance [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lazy-loading 'resources' on Instance uuid a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 630.735890] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bbb9967b-3dd1-4a69-8476-bbd22f0d620a tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "2d787237-26e5-4519-9f6e-1d30b9d016cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.750s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.810046] env[65758]: DEBUG nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 630.865402] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "24016efd-cdb3-4c1e-9c08-8643400e729e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.865641] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "24016efd-cdb3-4c1e-9c08-8643400e729e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.884618] env[65758]: DEBUG oslo_concurrency.lockutils [req-8b270f80-9731-4db6-ae65-854c5d39a5ee req-6ee3fc13-3f0b-4fd7-a1b9-422c2dd558ad service nova] Releasing lock "refresh_cache-0ac196fa-d88c-45a8-999e-8b5216912041" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.885139] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659942, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.886130] env[65758]: DEBUG nova.compute.manager [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Received event network-changed-b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 630.886309] env[65758]: DEBUG nova.compute.manager [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Refreshing instance network info cache due to event network-changed-b574c870-790b-4dad-8dce-58d93bb6fe44. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 630.886508] env[65758]: DEBUG oslo_concurrency.lockutils [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] Acquiring lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.886743] env[65758]: DEBUG oslo_concurrency.lockutils [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] Acquired lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.886796] env[65758]: DEBUG nova.network.neutron [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Refreshing network info cache for port b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 630.906341] env[65758]: DEBUG nova.compute.manager [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Received event network-changed-fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 630.906545] env[65758]: DEBUG nova.compute.manager [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Refreshing instance network info cache due to event network-changed-fb1e683c-095a-4512-a0a0-ec651a275ab8. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 630.906752] env[65758]: DEBUG oslo_concurrency.lockutils [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] Acquiring lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.906974] env[65758]: DEBUG oslo_concurrency.lockutils [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] Acquired lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.907081] env[65758]: DEBUG nova.network.neutron [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Refreshing network info cache for port fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 630.912022] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 630.912022] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 630.912022] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 630.971225] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.72661} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.973362] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041/0ac196fa-d88c-45a8-999e-8b5216912041.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 630.973535] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 630.974157] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc34c769-ec51-461b-81d6-4dd9f1cce6e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.983775] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 630.983775] env[65758]: value = "task-4659946" [ 630.983775] env[65758]: _type = "Task" [ 630.983775] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.998218] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.012798] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.016893] env[65758]: DEBUG oslo_vmware.api [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Task: {'id': task-4659945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.448192} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.017476] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 631.017476] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 631.017476] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 631.017653] env[65758]: INFO nova.compute.manager [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Took 1.15 seconds to destroy the instance on the hypervisor. [ 631.017830] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 631.018032] env[65758]: DEBUG nova.compute.manager [-] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 631.018188] env[65758]: DEBUG nova.network.neutron [-] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 631.018734] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 631.019059] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 631.019317] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 631.050302] env[65758]: DEBUG nova.network.neutron [-] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 631.050636] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 631.133391] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 631.134083] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 631.134555] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 631.228964] env[65758]: DEBUG nova.compute.utils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 631.238933] env[65758]: DEBUG nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 631.239191] env[65758]: DEBUG nova.network.neutron [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 631.239533] env[65758]: WARNING neutronclient.v2_0.client [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 631.239908] env[65758]: WARNING neutronclient.v2_0.client [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
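The recurring "Disabling service 'block-storage'" and "Disabling service 'key-manager'" warnings above come from openstacksdk reading per-service options such as valid_interfaces out of the [cinder] and [barbican] groups of the Nova config; those options are evidently not registered there, so oslo.config raises NoSuchOptError and the SDK disables that service for the request. A minimal sketch of how the exception arises, using oslo.config directly; the group and option names are taken from the warning text, everything else is illustrative:

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    # Register the [cinder] group but not a 'valid_interfaces' option in it,
    # mirroring a config object that lacks the option the caller asks for.
    conf.register_group(cfg.OptGroup(name='cinder'))
    conf([])  # no config files, no command-line arguments

    try:
        conf.cinder.valid_interfaces  # lookup of an unregistered option
    except cfg.NoSuchOptError as exc:
        # prints: no such option valid_interfaces in group [cinder]
        print(exc)

The warning repeats for each request context in this log presumably because every such lookup hits the same unregistered option.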
[ 631.241044] env[65758]: WARNING openstack [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 631.241426] env[65758]: WARNING openstack [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 631.250870] env[65758]: DEBUG nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 631.341296] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.354898] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Updated VIF entry in instance network info cache for port df4cf195-46a9-4de5-ae34-2363de4377f0. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 631.355362] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Updating instance_info_cache with network_info: [{"id": "df4cf195-46a9-4de5-ae34-2363de4377f0", "address": "fa:16:3e:a2:35:0f", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf4cf195-46", "ovs_interfaceid": "df4cf195-46a9-4de5-ae34-2363de4377f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 631.390769] env[65758]: WARNING neutronclient.v2_0.client [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 631.391593] env[65758]: WARNING openstack [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 631.392207] env[65758]: WARNING openstack [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 631.401439] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659942, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.418037] env[65758]: WARNING neutronclient.v2_0.client [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 631.418674] env[65758]: WARNING openstack [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 631.419119] env[65758]: WARNING openstack [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 631.508782] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203128} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.509946] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 631.516904] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89759aa-88d8-4d08-9ee7-3951cf3a5946 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.531726] env[65758]: DEBUG nova.policy [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e60e042c807349bf8ba4420749e694fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd999e5f3384e4a24ad9ec68b2fa3fda7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 631.540804] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquiring lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.540804] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.541380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquiring lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.541380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.541380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.542958] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659944, 'name': ReconfigVM_Task, 'duration_secs': 0.836841} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.543964] env[65758]: INFO nova.compute.manager [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Terminating instance [ 631.548969] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.551360] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a113380f-c8b0-4cae-a874-5b0fa4bfdc3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.565039] env[65758]: DEBUG nova.network.neutron [-] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 631.575158] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041/0ac196fa-d88c-45a8-999e-8b5216912041.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 631.579548] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01e520a4-e2f9-4b37-9c89-19ada553294f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.602175] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 631.602175] env[65758]: value = "task-4659947" [ 631.602175] env[65758]: _type = "Task" [ 631.602175] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.611821] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 631.611821] env[65758]: value = "task-4659948" [ 631.611821] env[65758]: _type = "Task" [ 631.611821] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.620693] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659947, 'name': Rename_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.631273] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659948, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.740118] env[65758]: DEBUG nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 631.782433] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.801391] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd13c5f7-cfb0-4d31-8f6e-25c1cd64e342 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.813184] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7747ac71-430f-4985-8181-d887e724c6a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.847856] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e75935c-433d-4814-8826-def7e6bf166c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.856950] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c5ef8a-3c06-4767-997f-03c0be18ffbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.861766] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Releasing lock "refresh_cache-2d787237-26e5-4519-9f6e-1d30b9d016cf" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.861963] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Received event network-vif-plugged-afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 631.862179] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "e60efbcd-1c4e-40a1-8bc1-893daa511073-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.862384] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 
req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.862568] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.862736] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] No waiting events found dispatching network-vif-plugged-afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 631.862909] env[65758]: WARNING nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Received unexpected event network-vif-plugged-afb9abca-e097-4678-9ae2-5b3775cf16e9 for instance with vm_state building and task_state spawning. [ 631.863085] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Received event network-changed-afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 631.863244] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Refreshing instance network info cache due to event network-changed-afb9abca-e097-4678-9ae2-5b3775cf16e9. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 631.863426] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.863561] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquired lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.863702] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Refreshing network info cache for port afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 631.876907] env[65758]: DEBUG nova.compute.provider_tree [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.890179] env[65758]: DEBUG oslo_vmware.api [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4659942, 'name': PowerOnVM_Task, 'duration_secs': 1.06248} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.890452] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 631.890744] env[65758]: INFO nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Took 9.71 seconds to spawn the instance on the hypervisor. 
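Task entries like the PowerOnVM_Task above follow the wait/poll pattern that shows up throughout this log: the driver submits a vSphere task, then polls it, logging "progress is 33%/66%/100%" until the task completes or errors. The loop below is only a schematic of that pattern with a stand-in poll callable, not oslo.vmware's implementation:

    import time

    def wait_for_task(poll, interval=0.5):
        # Poll a long-running task until it reports success or failure.
        # `poll` stands in for fetching the task's status and must return
        # (state, progress) tuples.
        while True:
            state, progress = poll()
            print("progress is %d%%" % progress)
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task failed')
            time.sleep(interval)

    # Toy task that succeeds after three polls, like the power-on task above.
    steps = iter([('running', 33), ('running', 66), ('success', 100)])
    wait_for_task(lambda: next(steps), interval=0)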
[ 631.890985] env[65758]: DEBUG nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 631.891983] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6859ee35-956a-40b9-b28b-9ff4a7887dc8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.036912] env[65758]: DEBUG nova.network.neutron [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Successfully created port: 4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 632.067755] env[65758]: DEBUG nova.compute.manager [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 632.067968] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 632.069195] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667823be-2ee3-4600-8dc4-f5e3935d15b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.078327] env[65758]: INFO nova.compute.manager [-] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Took 1.06 seconds to deallocate network for instance. [ 632.078790] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 632.081102] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85f5c138-8fbf-4ac8-a601-fff492fb2a0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.093923] env[65758]: DEBUG oslo_vmware.api [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 632.093923] env[65758]: value = "task-4659949" [ 632.093923] env[65758]: _type = "Task" [ 632.093923] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.103247] env[65758]: DEBUG oslo_vmware.api [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659949, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.114293] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659947, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.124009] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659948, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.367433] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 632.368303] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 632.368876] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 632.385044] env[65758]: DEBUG nova.scheduler.client.report [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.413854] env[65758]: INFO nova.compute.manager [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Took 27.27 seconds to build instance. 
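The inventory reported above is what placement uses to size this provider: usable capacity for a resource class is (total - reserved) * allocation_ratio, and max_unit caps how much a single allocation may claim. A small illustrative calculation with the figures from provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51; the capacity helper is illustrative, not Nova code:

    def capacity(total, reserved, allocation_ratio):
        # Schedulable capacity as placement derives it from an inventory record.
        return (total - reserved) * allocation_ratio

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
    }

    for rc, inv in inventory.items():
        print(rc, capacity(inv['total'], inv['reserved'], inv['allocation_ratio']),
              'max per allocation:', inv['max_unit'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0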
[ 632.589052] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.605528] env[65758]: DEBUG oslo_vmware.api [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659949, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.615747] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659947, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.625389] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659948, 'name': ReconfigVM_Task, 'duration_secs': 0.521342} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.625845] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041/0ac196fa-d88c-45a8-999e-8b5216912041.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 632.626369] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5e71af4-1a6c-4aa7-920e-25cabf059998 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.633820] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 632.633820] env[65758]: value = "task-4659950" [ 632.633820] env[65758]: _type = "Task" [ 632.633820] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.643188] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659950, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.752306] env[65758]: DEBUG nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 632.786114] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 632.786491] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 632.786737] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 632.787037] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 632.787267] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 632.787505] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 632.787832] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.788103] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 632.788375] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 632.788661] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 632.788935] env[65758]: DEBUG nova.virt.hardware [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 632.790334] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72596a0b-0cf0-46c6-a139-61ec32688fd5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.802447] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffef15f8-a553-44da-9d13-c3b516dfaa4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.888852] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.891523] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.671s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.893516] env[65758]: INFO nova.compute.claims [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.917117] env[65758]: INFO nova.scheduler.client.report [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Deleted allocations for instance a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974 [ 632.919147] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69092a20-05ce-4047-b135-8a93ad9c9310 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.154s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.106240] env[65758]: DEBUG oslo_vmware.api [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659949, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.115937] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659947, 'name': Rename_Task, 'duration_secs': 1.252916} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.116417] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.117396] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0b383aa-d862-4be7-97a3-5ead430833da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.126533] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 633.126533] env[65758]: value = "task-4659951" [ 633.126533] env[65758]: _type = "Task" [ 633.126533] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.136732] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659951, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.147617] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659950, 'name': Rename_Task, 'duration_secs': 0.288927} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.147886] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.148159] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6415aae-9235-45fb-83ad-a1339135b698 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.157642] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 633.157642] env[65758]: value = "task-4659952" [ 633.157642] env[65758]: _type = "Task" [ 633.157642] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.171702] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.430071] env[65758]: DEBUG nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 633.433903] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6eb034ea-e1c7-471e-8028-fabb9775a14f tempest-TenantUsagesTestJSON-1449198332 tempest-TenantUsagesTestJSON-1449198332-project-member] Lock "a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.646s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.479832] env[65758]: WARNING neutronclient.v2_0.client [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
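The lockutils lines scattered through this stretch ("Acquiring lock ... by ...", "acquired ... waited 14.671s", "released ... held 2.165s") come from oslo.concurrency, which reports how long a caller waited for a named lock and how long it held it. A rough sketch of the same usage, assuming only that oslo_concurrency.lockutils.lock() is available as a context manager; the timing code here is illustrative and not the library's internals:

import time

from oslo_concurrency import lockutils

def update_usage(node):
    # Critical section analogous to ResourceTracker.update_usage
    # guarding the "compute_resources" lock.
    wait_start = time.monotonic()
    with lockutils.lock("compute_resources"):
        waited = time.monotonic() - wait_start
        held_start = time.monotonic()
        print(f'Lock "compute_resources" acquired :: waited {waited:.3f}s')
        time.sleep(0.1)  # stand-in for the real bookkeeping work
        held = time.monotonic() - held_start
    print(f'Lock "compute_resources" released :: held {held:.3f}s')

if __name__ == "__main__":
    update_usage("domain-c8")
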
[ 633.481971] env[65758]: WARNING openstack [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 633.482590] env[65758]: WARNING openstack [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 633.607034] env[65758]: DEBUG oslo_vmware.api [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659949, 'name': PowerOffVM_Task, 'duration_secs': 1.028133} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.607893] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.608079] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.608445] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0a294d3-28b2-4963-abbc-500a85f3dfd0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.640860] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659951, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.669538] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659952, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.705172] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.705339] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.705515] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Deleting the datastore file [datastore1] 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.705798] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de191fd8-4c23-42fc-8eeb-de87d3177497 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.713711] env[65758]: DEBUG oslo_vmware.api [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for the task: (returnval){ [ 633.713711] env[65758]: value = "task-4659954" [ 633.713711] env[65758]: _type = "Task" [ 633.713711] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.724246] env[65758]: DEBUG oslo_vmware.api [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.798311] env[65758]: DEBUG nova.network.neutron [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Successfully updated port: 4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 633.829458] env[65758]: WARNING neutronclient.v2_0.client [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
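Taken together, the vmops/vm_util entries above trace the VMware destroy path for instance 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b: power the VM off, unregister it from vCenter, then delete its directory from the datastore, waiting on each returned task. A condensed sketch of that ordering; the session helpers here are hypothetical stand-ins, not the real nova.virt.vmwareapi API:

class FakeSession:
    """Stand-in for the driver's vCenter session; demonstration only."""

    def invoke_api(self, method, target):
        print(f"Invoking {method} on {target}")
        return f"task-for-{method}"

    def wait_for_task(self, task):
        print(f"Waiting for {task} ... done")

def destroy_instance(session, vm_ref, datastore_path):
    """Sketch of the ordering seen in the log:
    PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task."""
    # 1. Power off the VM and wait for the task to finish.
    session.wait_for_task(session.invoke_api("PowerOffVM_Task", vm_ref))
    # 2. Unregister the VM from vCenter (no task to wait on here).
    session.invoke_api("UnregisterVM", vm_ref)
    # 3. Delete the instance directory from the datastore.
    session.wait_for_task(
        session.invoke_api("DeleteDatastoreFile_Task", datastore_path))

if __name__ == "__main__":
    destroy_instance(FakeSession(), "vm-64c4718b",
                     "[datastore1] 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b")
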
[ 633.830170] env[65758]: WARNING openstack [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 633.830569] env[65758]: WARNING openstack [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 633.958771] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.144975] env[65758]: DEBUG oslo_vmware.api [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659951, 'name': PowerOnVM_Task, 'duration_secs': 0.546771} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.145384] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.145645] env[65758]: INFO nova.compute.manager [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Took 8.03 seconds to spawn the instance on the hypervisor. [ 634.145731] env[65758]: DEBUG nova.compute.manager [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 634.146682] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0cf899-973e-4dcc-be59-a18ed495d632 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.173925] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659952, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.241599] env[65758]: DEBUG oslo_vmware.api [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Task: {'id': task-4659954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.286298} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.244892] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 634.244892] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 634.245170] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.245207] env[65758]: INFO nova.compute.manager [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Took 2.18 seconds to destroy the instance on the hypervisor. [ 634.245442] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 634.248543] env[65758]: DEBUG nova.compute.manager [-] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 634.248672] env[65758]: DEBUG nova.network.neutron [-] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 634.249288] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
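The recurring "Disabling service 'block-storage' / 'key-manager'" warnings are openstacksdk reacting to an oslo.config NoSuchOptError: it looks up valid_interfaces in the [cinder] and [barbican] groups, the option was never registered in this process, so the SDK drops those services rather than fail. A minimal reproduction of that underlying oslo.config behaviour; the option names here are chosen only for illustration:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup("cinder"))
# 'valid_interfaces' is deliberately NOT registered in [cinder].
conf.register_opt(cfg.StrOpt("catalog_info"), group="cinder")
conf([])  # parse an empty command line

print(conf.cinder.catalog_info)  # registered option -> None (no default set)
try:
    print(conf.cinder.valid_interfaces)
except cfg.NoSuchOptError as exc:
    # This is the error the SDK reports before disabling the service.
    print(f"NoSuchOptError: {exc}")
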
[ 634.249549] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 634.250069] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 634.302876] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "refresh_cache-f7a14628-cc55-41fa-ae89-3958855df8a7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.302876] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquired lock "refresh_cache-f7a14628-cc55-41fa-ae89-3958855df8a7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.302876] env[65758]: DEBUG nova.network.neutron [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 634.321026] env[65758]: DEBUG nova.network.neutron [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Updated VIF entry in instance network info cache for port b574c870-790b-4dad-8dce-58d93bb6fe44. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 634.321317] env[65758]: DEBUG nova.network.neutron [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Updating instance_info_cache with network_info: [{"id": "b574c870-790b-4dad-8dce-58d93bb6fe44", "address": "fa:16:3e:62:75:62", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb574c870-79", "ovs_interfaceid": "b574c870-790b-4dad-8dce-58d93bb6fe44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 634.360189] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d77738-bd9f-41a8-9446-45f3f175a6c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.370718] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28fdc85-7535-4982-ac5e-2d8d0d390427 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.406355] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24e404d-7e37-4fd4-91d2-698c5c47d58e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.418846] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec55ca9-5b5e-4b62-a0e4-6e36b630854d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.434896] env[65758]: DEBUG nova.compute.provider_tree [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.603034] env[65758]: DEBUG nova.network.neutron [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updated VIF entry in instance network info cache for port fb1e683c-095a-4512-a0a0-ec651a275ab8. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 634.603415] env[65758]: DEBUG nova.network.neutron [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updating instance_info_cache with network_info: [{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 634.673688] env[65758]: DEBUG oslo_vmware.api [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659952, 'name': PowerOnVM_Task, 'duration_secs': 1.344654} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.675640] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.675969] env[65758]: INFO nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Took 10.55 seconds to spawn the instance on the hypervisor. [ 634.676020] env[65758]: DEBUG nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 634.676471] env[65758]: INFO nova.compute.manager [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Took 26.15 seconds to build instance. 
[ 634.677779] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9332fdb-5f36-4602-959e-33c974fb10ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.719806] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 634.762147] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 634.762821] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 634.763264] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 634.805726] env[65758]: WARNING openstack [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 634.806144] env[65758]: WARNING openstack [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 634.825172] env[65758]: DEBUG oslo_concurrency.lockutils [req-e9a7e01d-46e7-4b4d-a038-f3ae62f748b9 req-ef1cfcbf-3570-41d4-aeb6-917fd2e7357a service nova] Releasing lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.942019] env[65758]: DEBUG nova.network.neutron [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 634.943173] env[65758]: DEBUG nova.scheduler.client.report [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 635.020043] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Updated VIF entry in instance network info cache for port afb9abca-e097-4678-9ae2-5b3775cf16e9. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 635.020043] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Updating instance_info_cache with network_info: [{"id": "afb9abca-e097-4678-9ae2-5b3775cf16e9", "address": "fa:16:3e:dd:80:22", "network": {"id": "dec9f876-3382-4488-90e2-702f201ed688", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-107507873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d100ba970de24698aff03c4c537b3c18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb9abca-e0", "ovs_interfaceid": "afb9abca-e097-4678-9ae2-5b3775cf16e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 635.106236] env[65758]: DEBUG oslo_concurrency.lockutils [req-efc89f59-2907-48af-b8a2-9a957db04a56 req-5d708da9-7868-480d-a0b2-d81ffc0b34df service nova] Releasing lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.166098] env[65758]: WARNING neutronclient.v2_0.client [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
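The network_info blobs being written to the instance cache above are lists of VIF dicts: each entry carries the port id, MAC address, and a nested network with subnets, fixed IPs, and any floating IPs. A small helper that pulls those addresses back out of the structure; the shape is taken from the cache entries quoted in this log, and no fields beyond what is shown are assumed:

def summarize_network_info(network_info):
    """Return (port_id, mac, fixed_ips, floating_ips) tuples from a
    network_info list shaped like the cache entries in this log."""
    rows = []
    for vif in network_info:
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                for fip in ip.get("floating_ips", []):
                    floating.append(fip["address"])
        rows.append((vif["id"], vif["address"], fixed, floating))
    return rows

if __name__ == "__main__":
    sample = [{
        "id": "b574c870-790b-4dad-8dce-58d93bb6fe44",
        "address": "fa:16:3e:62:75:62",
        "network": {"subnets": [{"ips": [{
            "address": "192.168.128.5",
            "floating_ips": [{"address": "10.180.180.202"}],
        }]}]},
    }]
    print(summarize_network_info(sample))
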
[ 635.166909] env[65758]: WARNING openstack [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 635.168355] env[65758]: WARNING openstack [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 635.181916] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef881e3d-004c-4c07-ae31-9384ea8f1185 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "24379189-b10a-4ef6-a3f6-b7bb43029dab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.879s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.200218] env[65758]: INFO nova.compute.manager [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Took 28.22 seconds to build instance. [ 635.267563] env[65758]: DEBUG nova.network.neutron [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Updating instance_info_cache with network_info: [{"id": "4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7", "address": "fa:16:3e:a3:af:6e", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d1d9ca2-db", "ovs_interfaceid": "4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 635.451419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.452158] env[65758]: DEBUG nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 635.455060] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.626s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.455528] env[65758]: DEBUG nova.objects.instance [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lazy-loading 'resources' on Instance uuid f1a1650b-4c45-47fc-9c45-f4625c959597 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 635.527577] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Releasing lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.527577] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Received event network-vif-deleted-f3319916-956f-49ba-9da5-ad0df9c5953c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 635.527780] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Received event network-vif-plugged-09d73b49-88a0-426f-915b-c6c03998738f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 635.527970] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.528204] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.528364] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.528547] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] No waiting events found dispatching network-vif-plugged-09d73b49-88a0-426f-915b-c6c03998738f {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 635.528734] env[65758]: WARNING nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Received unexpected event network-vif-plugged-09d73b49-88a0-426f-915b-c6c03998738f for instance with vm_state building and task_state spawning. [ 635.529221] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Received event network-changed-09d73b49-88a0-426f-915b-c6c03998738f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 635.529861] env[65758]: DEBUG nova.compute.manager [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Refreshing instance network info cache due to event network-changed-09d73b49-88a0-426f-915b-c6c03998738f. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 635.529960] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquiring lock "refresh_cache-3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.530139] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Acquired lock "refresh_cache-3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.530303] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Refreshing network info cache for port 09d73b49-88a0-426f-915b-c6c03998738f {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 635.545085] env[65758]: DEBUG nova.network.neutron [-] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 635.702656] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d23c86a9-ff51-4670-a1aa-50cc28d5bf24 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "0ac196fa-d88c-45a8-999e-8b5216912041" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.732s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.737312] env[65758]: DEBUG nova.compute.manager [req-221b9a6c-f28c-42b0-8cc7-0347ec029df7 req-b980c697-b059-40bf-acfc-fed7674e0a24 service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Received event 
network-vif-plugged-4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 635.737312] env[65758]: DEBUG oslo_concurrency.lockutils [req-221b9a6c-f28c-42b0-8cc7-0347ec029df7 req-b980c697-b059-40bf-acfc-fed7674e0a24 service nova] Acquiring lock "f7a14628-cc55-41fa-ae89-3958855df8a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.737312] env[65758]: DEBUG oslo_concurrency.lockutils [req-221b9a6c-f28c-42b0-8cc7-0347ec029df7 req-b980c697-b059-40bf-acfc-fed7674e0a24 service nova] Lock "f7a14628-cc55-41fa-ae89-3958855df8a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.737312] env[65758]: DEBUG oslo_concurrency.lockutils [req-221b9a6c-f28c-42b0-8cc7-0347ec029df7 req-b980c697-b059-40bf-acfc-fed7674e0a24 service nova] Lock "f7a14628-cc55-41fa-ae89-3958855df8a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.737688] env[65758]: DEBUG nova.compute.manager [req-221b9a6c-f28c-42b0-8cc7-0347ec029df7 req-b980c697-b059-40bf-acfc-fed7674e0a24 service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] No waiting events found dispatching network-vif-plugged-4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 635.737990] env[65758]: WARNING nova.compute.manager [req-221b9a6c-f28c-42b0-8cc7-0347ec029df7 req-b980c697-b059-40bf-acfc-fed7674e0a24 service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Received unexpected event network-vif-plugged-4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7 for instance with vm_state building and task_state spawning. 
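The network-vif-plugged handling above shows the external-event dispatch pattern: the compute manager takes the per-instance "<uuid>-events" lock, looks for a waiter registered for that event, and, finding none ("No waiting events found"), logs the event as unexpected because nothing in the spawn path was blocked on it yet. A stripped-down sketch of that idea using a plain dict of threading.Event objects; this illustrates the pattern only and is not Nova's InstanceEvents code:

import threading

_waiters = {}                      # event name -> threading.Event
_waiters_lock = threading.Lock()   # plays the role of the "<uuid>-events" lock

def prepare_for_event(name):
    """Register interest in an event before it can arrive."""
    ev = threading.Event()
    with _waiters_lock:
        _waiters[name] = ev
    return ev

def dispatch_event(name):
    """Deliver an external event; warn if nobody was waiting for it."""
    with _waiters_lock:
        ev = _waiters.pop(name, None)
    if ev is None:
        print(f"WARNING: Received unexpected event {name}")
    else:
        ev.set()

if __name__ == "__main__":
    dispatch_event("network-vif-plugged-4d1d9ca2")   # nobody waiting -> warning
    waiter = prepare_for_event("network-vif-plugged-4d1d9ca2")
    dispatch_event("network-vif-plugged-4d1d9ca2")
    print("plugged:", waiter.wait(timeout=1))
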
[ 635.769996] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Releasing lock "refresh_cache-f7a14628-cc55-41fa-ae89-3958855df8a7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.770406] env[65758]: DEBUG nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Instance network_info: |[{"id": "4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7", "address": "fa:16:3e:a3:af:6e", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d1d9ca2-db", "ovs_interfaceid": "4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 635.771295] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:af:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 635.778798] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 635.779353] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 635.780217] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a22b1087-e0fc-4aaf-a2b1-a5746f7ed61d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.803036] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 635.803036] env[65758]: value = "task-4659955" [ 635.803036] env[65758]: _type = "Task" [ 635.803036] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.812848] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659955, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.959390] env[65758]: DEBUG nova.compute.utils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 635.963794] env[65758]: DEBUG nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 635.964724] env[65758]: DEBUG nova.network.neutron [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 635.964724] env[65758]: WARNING neutronclient.v2_0.client [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 635.964724] env[65758]: WARNING neutronclient.v2_0.client [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 635.965168] env[65758]: WARNING openstack [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 635.965539] env[65758]: WARNING openstack [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 636.022981] env[65758]: DEBUG nova.policy [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8dcbde9f217e4ebd847282da61e502ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45aad313d10447e9ba61ed0a05b915ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.035382] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 636.036156] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 636.036388] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 636.048639] env[65758]: INFO nova.compute.manager [-] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Took 1.80 seconds to deallocate network for instance. [ 636.316211] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659955, 'name': CreateVM_Task, 'duration_secs': 0.487847} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.320404] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 636.321242] env[65758]: WARNING neutronclient.v2_0.client [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 636.321622] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.321821] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.324210] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 636.324424] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0f0e680-a4db-4092-8fa1-9cf7c05f7f29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.335951] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 636.335951] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245c824-5acf-ddf9-176d-3788f3632e0d" [ 636.335951] env[65758]: _type = "Task" [ 636.335951] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.346708] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5245c824-5acf-ddf9-176d-3788f3632e0d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.404150] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de5ffb4-326c-4b7e-a722-6e56b20d2fc4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.419143] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0ebe21-b12a-4906-91f0-d1c05cca4e06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.453287] env[65758]: DEBUG nova.network.neutron [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Successfully created port: 0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 636.458868] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412dd1cf-7605-4546-9195-89b16b99a8e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.464204] env[65758]: DEBUG nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 636.470578] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82280876-b0df-4f18-9e3a-f5fd1a0790a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.488137] env[65758]: DEBUG nova.compute.provider_tree [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.557067] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.656199] env[65758]: DEBUG nova.compute.manager [req-fd0491ea-3056-4fef-882a-e6c1558625e4 req-1f56342e-006b-4f90-b4a3-4f26af91957b service nova] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Received event network-vif-deleted-9a9b1289-899b-4fe7-b1a8-cc090598a824 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 636.671724] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquiring lock "8eb65797-072b-4a7e-853d-26c0adc51bb2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.671964] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "8eb65797-072b-4a7e-853d-26c0adc51bb2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.848269] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5245c824-5acf-ddf9-176d-3788f3632e0d, 'name': SearchDatastore_Task, 'duration_secs': 0.014068} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.848591] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.848980] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 636.849262] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.849723] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.849723] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 636.849894] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bd70850-b61d-4906-b151-5a8fa5a57d7a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.861121] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 
tempest-ServersAdminNegativeTestJSON-152403051-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 636.861699] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 636.862094] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-891e568e-10c9-4c44-9c96-3a0a68fa100d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.868782] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 636.868782] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522c9fc0-a689-bea8-dfe6-0115dc704f0e" [ 636.868782] env[65758]: _type = "Task" [ 636.868782] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.878316] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522c9fc0-a689-bea8-dfe6-0115dc704f0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.995046] env[65758]: DEBUG nova.scheduler.client.report [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.111848] env[65758]: WARNING neutronclient.v2_0.client [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 637.112632] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 637.113085] env[65758]: WARNING openstack [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 637.175684] env[65758]: DEBUG nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 637.385117] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522c9fc0-a689-bea8-dfe6-0115dc704f0e, 'name': SearchDatastore_Task, 'duration_secs': 0.026472} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.386387] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dda70a1-8583-401b-b294-f07f15aa1a38 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.396076] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 637.396076] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5227610c-7378-b1ff-a6c0-b51194e5d960" [ 637.396076] env[65758]: _type = "Task" [ 637.396076] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.409100] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5227610c-7378-b1ff-a6c0-b51194e5d960, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.477501] env[65758]: DEBUG nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 637.500329] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.045s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.508019] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:14:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='309042309',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1929231467',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 637.508220] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.508616] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 637.508616] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.508776] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 637.509623] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 637.509915] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 
tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 637.510052] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 637.510224] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 637.510375] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 637.511288] env[65758]: DEBUG nova.virt.hardware [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 637.512814] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.172s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.517033] env[65758]: INFO nova.compute.claims [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.517548] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a292bce8-9f5f-4569-ad2a-3ae87fb8e815 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.532523] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f13030-7c98-4995-8339-39e0b1672c4a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.593084] env[65758]: INFO nova.scheduler.client.report [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Deleted allocations for instance f1a1650b-4c45-47fc-9c45-f4625c959597 [ 637.647991] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Updated VIF entry in instance network info cache for port 
09d73b49-88a0-426f-915b-c6c03998738f. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 637.648379] env[65758]: DEBUG nova.network.neutron [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Updating instance_info_cache with network_info: [{"id": "09d73b49-88a0-426f-915b-c6c03998738f", "address": "fa:16:3e:17:ed:60", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09d73b49-88", "ovs_interfaceid": "09d73b49-88a0-426f-915b-c6c03998738f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 637.710611] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.907693] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5227610c-7378-b1ff-a6c0-b51194e5d960, 'name': SearchDatastore_Task, 'duration_secs': 0.016443} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.908040] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.908323] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] f7a14628-cc55-41fa-ae89-3958855df8a7/f7a14628-cc55-41fa-ae89-3958855df8a7.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 637.908641] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-190f477d-60bd-49c4-a439-7a0350aadef9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.916827] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 637.916827] env[65758]: value = "task-4659956" [ 637.916827] env[65758]: _type = "Task" [ 637.916827] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.925948] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659956, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.106033] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13921140-8668-458e-9d47-ea1e40361467 tempest-InstanceActionsNegativeTestJSON-502192177 tempest-InstanceActionsNegativeTestJSON-502192177-project-member] Lock "f1a1650b-4c45-47fc-9c45-f4625c959597" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.260s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.154295] env[65758]: DEBUG oslo_concurrency.lockutils [req-5abf2ca5-4254-4353-87a4-7a0bf4c3a020 req-c2339858-b339-48d4-8c37-c76c0e5a05ac service nova] Releasing lock "refresh_cache-3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.259399] env[65758]: INFO nova.compute.manager [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Rebuilding instance [ 638.297380] env[65758]: DEBUG nova.network.neutron [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Successfully updated port: 0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 638.323463] env[65758]: DEBUG nova.compute.manager [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 638.324921] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97af3219-8952-4117-b4a5-1b7cc844200a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.429491] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659956, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.801592] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.801823] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquired lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.801983] env[65758]: DEBUG nova.network.neutron [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 638.928835] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659956, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.930940] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55da5c69-a481-4911-a406-751b12e15482 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.939236] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8aea56-069e-4f65-b4fb-fcd55a2c92bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.973221] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c9ae3d-fe6e-482d-bd62-d62b5ce80c6d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.982361] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8220a3-19e7-42dc-ba60-2b5ab96a9774 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.997535] env[65758]: DEBUG nova.compute.provider_tree [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.307087] env[65758]: WARNING openstack [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: 
oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 639.308352] env[65758]: WARNING openstack [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 639.341367] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 639.341878] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28a24a29-6b83-47bb-85af-bdf106a801ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.350659] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 639.350659] env[65758]: value = "task-4659957" [ 639.350659] env[65758]: _type = "Task" [ 639.350659] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.361115] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659957, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.434948] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659956, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.046811} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.435336] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] f7a14628-cc55-41fa-ae89-3958855df8a7/f7a14628-cc55-41fa-ae89-3958855df8a7.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 639.437126] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 639.437126] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aea2eede-7bae-4d72-9251-00ecdb9bf9a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.445955] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 639.445955] env[65758]: value = "task-4659958" [ 639.445955] env[65758]: _type = "Task" [ 639.445955] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.456219] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659958, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.502096] env[65758]: DEBUG nova.scheduler.client.report [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.740538] env[65758]: DEBUG nova.network.neutron [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 639.866029] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659957, 'name': PowerOffVM_Task, 'duration_secs': 0.226106} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.866029] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 639.866029] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 639.866029] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d28a958-16dc-4d0c-b2e8-d0a85ae26d77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.871835] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 639.872111] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95f40f20-3701-41b8-9b56-4f08d30c469b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.905607] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 639.905833] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 639.906052] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Deleting the datastore file [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 639.906298] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ebfed9c-e0ea-4da8-bf33-8580fdc933be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.915496] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 
tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 639.915496] env[65758]: value = "task-4659960" [ 639.915496] env[65758]: _type = "Task" [ 639.915496] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.930221] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659960, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.957557] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069165} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.957944] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 639.958818] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012e601b-5ff8-46de-9f9a-3b89fe82866a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.985792] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] f7a14628-cc55-41fa-ae89-3958855df8a7/f7a14628-cc55-41fa-ae89-3958855df8a7.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 639.986211] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49c0d9f4-8f87-4d8b-935a-cd0b79441bc6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.011372] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.011907] env[65758]: DEBUG nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 640.018951] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 640.018951] env[65758]: value = "task-4659961" [ 640.018951] env[65758]: _type = "Task" [ 640.018951] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.019495] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.886s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.021246] env[65758]: INFO nova.compute.claims [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.037174] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.428698] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099665} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.429192] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 640.429546] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 640.429897] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 640.524142] env[65758]: DEBUG nova.compute.utils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 640.524142] env[65758]: DEBUG nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 640.524142] env[65758]: DEBUG nova.network.neutron [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 640.524388] env[65758]: WARNING neutronclient.v2_0.client [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 640.524822] env[65758]: WARNING neutronclient.v2_0.client [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 640.525869] env[65758]: WARNING openstack [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 640.526399] env[65758]: WARNING openstack [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 640.549273] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.790659] env[65758]: DEBUG nova.compute.manager [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Received event network-changed-4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 640.790878] env[65758]: DEBUG nova.compute.manager [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Refreshing instance network info cache due to event network-changed-4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 640.791136] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Acquiring lock "refresh_cache-f7a14628-cc55-41fa-ae89-3958855df8a7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.791295] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Acquired lock "refresh_cache-f7a14628-cc55-41fa-ae89-3958855df8a7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.791490] env[65758]: DEBUG nova.network.neutron [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Refreshing network info cache for port 4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 640.889643] env[65758]: WARNING neutronclient.v2_0.client [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 640.889847] env[65758]: WARNING openstack [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 640.890217] env[65758]: WARNING openstack [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 641.037105] env[65758]: DEBUG nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 641.060736] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659961, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.085972] env[65758]: DEBUG nova.policy [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf7d55f5316749d29d99cf06e5cd5267', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfe0dc92ebaa427fb02d92b1ef37a08c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 641.244958] env[65758]: DEBUG nova.network.neutron [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Updating instance_info_cache with network_info: [{"id": "0e626ecf-0686-4626-9e0c-31a51751b185", "address": "fa:16:3e:60:c5:ef", "network": {"id": "3770aad6-39a0-41da-84d1-b6aa69c0dfad", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-982589002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45aad313d10447e9ba61ed0a05b915ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e626ecf-06", "ovs_interfaceid": "0e626ecf-0686-4626-9e0c-31a51751b185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 641.302267] env[65758]: WARNING neutronclient.v2_0.client [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 641.302267] env[65758]: WARNING openstack [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 641.302267] env[65758]: WARNING openstack [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 641.395281] env[65758]: DEBUG nova.compute.manager [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Received event network-vif-plugged-0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 641.395505] env[65758]: DEBUG oslo_concurrency.lockutils [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Acquiring lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.395709] env[65758]: DEBUG oslo_concurrency.lockutils [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.395877] env[65758]: DEBUG oslo_concurrency.lockutils [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.396819] env[65758]: DEBUG nova.compute.manager [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] No waiting events found dispatching network-vif-plugged-0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:345}} [ 641.397199] env[65758]: WARNING nova.compute.manager [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Received unexpected event network-vif-plugged-0e626ecf-0686-4626-9e0c-31a51751b185 for instance with vm_state building and task_state spawning. [ 641.397258] env[65758]: DEBUG nova.compute.manager [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Received event network-changed-0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 641.397447] env[65758]: DEBUG nova.compute.manager [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Refreshing instance network info cache due to event network-changed-0e626ecf-0686-4626-9e0c-31a51751b185. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 641.397682] env[65758]: DEBUG oslo_concurrency.lockutils [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Acquiring lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.453470] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "9e007d55-0a5c-4469-a546-9b18e188bea0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.453751] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "9e007d55-0a5c-4469-a546-9b18e188bea0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.484903] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 641.484903] env[65758]: DEBUG nova.virt.hardware [None 
req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 641.484903] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 641.484903] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 641.485397] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 641.485397] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 641.485397] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 641.485397] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 641.485586] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 641.485632] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 641.485785] env[65758]: DEBUG nova.virt.hardware [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 641.487112] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb17545-dd2e-4a70-bec2-c07809d0d1dc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.503039] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199a380e-38f0-4a66-9d80-4648092ef0f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.524299] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.529977] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 641.531969] env[65758]: INFO nova.compute.manager [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Rebuilding instance [ 641.536430] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 641.537236] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4b1607-aa88-4f85-af7a-6e349412644e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.542219] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfbcb071-c5b8-4a70-a496-ab92996b7b13 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.561611] env[65758]: WARNING neutronclient.v2_0.client [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 641.562126] env[65758]: WARNING openstack [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 641.562602] env[65758]: WARNING openstack [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 641.587947] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf7ab33-aab4-4955-a989-940088afd7c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.598229] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.598229] env[65758]: value = "task-4659962" [ 641.598229] env[65758]: _type = "Task" [ 641.598229] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.599279] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659961, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.635845] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8241185-37df-454c-8f7a-329e89631126 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.643188] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659962, 'name': CreateVM_Task} progress is 15%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.653216] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3b34ca-c18f-4167-8511-0ce71882bcd3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.665850] env[65758]: DEBUG nova.compute.manager [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 641.667034] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b688ffb-4642-491a-b9da-ac6878f048aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.681923] env[65758]: DEBUG nova.compute.provider_tree [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.694435] env[65758]: DEBUG nova.network.neutron [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Successfully created port: b3d6b993-f74a-48db-b23d-102e47b4f09b {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 641.748753] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Releasing lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.748753] env[65758]: DEBUG nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Instance network_info: |[{"id": "0e626ecf-0686-4626-9e0c-31a51751b185", "address": "fa:16:3e:60:c5:ef", "network": {"id": "3770aad6-39a0-41da-84d1-b6aa69c0dfad", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-982589002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45aad313d10447e9ba61ed0a05b915ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e626ecf-06", "ovs_interfaceid": "0e626ecf-0686-4626-9e0c-31a51751b185", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 641.751109] env[65758]: DEBUG oslo_concurrency.lockutils [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Acquired lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.751109] env[65758]: DEBUG nova.network.neutron [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Refreshing network info cache for port 0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 641.754029] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:c5:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ada35c98-01a9-4352-98e4-1d20ba31f928', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e626ecf-0686-4626-9e0c-31a51751b185', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.776507] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Creating folder: Project (45aad313d10447e9ba61ed0a05b915ba). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 641.780326] env[65758]: DEBUG nova.network.neutron [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Updated VIF entry in instance network info cache for port 4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 641.780703] env[65758]: DEBUG nova.network.neutron [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Updating instance_info_cache with network_info: [{"id": "4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7", "address": "fa:16:3e:a3:af:6e", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d1d9ca2-db", "ovs_interfaceid": "4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 641.782187] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37b20594-46c7-4a77-89b3-50dd26703246 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.798022] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Created folder: Project (45aad313d10447e9ba61ed0a05b915ba) in parent group-v909763. [ 641.798459] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Creating folder: Instances. Parent ref: group-v909812. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 641.799602] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87ad7a4d-fe56-409e-89f2-99070f8c4ba6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.813510] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Created folder: Instances in parent group-v909812. [ 641.813510] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 641.813510] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 641.813803] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-982d4453-ab6b-421b-ad4c-58241e211e8a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.839179] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.839179] env[65758]: value = "task-4659965" [ 641.839179] env[65758]: _type = "Task" [ 641.839179] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.849499] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659965, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.956671] env[65758]: DEBUG nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 642.075067] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659961, 'name': ReconfigVM_Task, 'duration_secs': 1.800088} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.075454] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Reconfigured VM instance instance-00000011 to attach disk [datastore1] f7a14628-cc55-41fa-ae89-3958855df8a7/f7a14628-cc55-41fa-ae89-3958855df8a7.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.076646] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-263f5b49-d730-417e-a078-97ae432cbbe4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.082751] env[65758]: DEBUG nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 642.086519] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 642.086519] env[65758]: value = "task-4659966" [ 642.086519] env[65758]: _type = "Task" [ 642.086519] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.096247] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659966, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.113249] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 642.113531] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.113679] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 642.113904] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.114043] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 642.114785] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 642.114785] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 
642.114785] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 642.114785] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 642.114785] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 642.115022] env[65758]: DEBUG nova.virt.hardware [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 642.115835] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9884a840-2119-4171-a744-d8aa057817d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.122180] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659962, 'name': CreateVM_Task, 'duration_secs': 0.322682} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.122970] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 642.123258] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.123413] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.123838] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 642.124253] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4b313aa-07c0-41e3-a22c-64e1636c0c47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
642.129482] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dff4d2c-4c79-4f75-a45b-1494aecaa798 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.135083] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 642.135083] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5235bb25-91ea-dd42-f81b-40b9050767ad" [ 642.135083] env[65758]: _type = "Task" [ 642.135083] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.154114] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5235bb25-91ea-dd42-f81b-40b9050767ad, 'name': SearchDatastore_Task, 'duration_secs': 0.011584} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.154422] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.154650] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.154925] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.155009] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.155181] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.155441] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e823ff4-0a8f-433b-9217-e09727c32f47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.167025] env[65758]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.167025] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.167025] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fb197ac-8512-4807-a772-97cf8be378d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.173541] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 642.173541] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ff40e8-fafd-bf24-4535-233bd8653d0c" [ 642.173541] env[65758]: _type = "Task" [ 642.173541] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.182612] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ff40e8-fafd-bf24-4535-233bd8653d0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.187689] env[65758]: DEBUG nova.scheduler.client.report [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 642.278566] env[65758]: WARNING neutronclient.v2_0.client [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 642.279242] env[65758]: WARNING openstack [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 642.279703] env[65758]: WARNING openstack [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 642.287770] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Releasing lock "refresh_cache-f7a14628-cc55-41fa-ae89-3958855df8a7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.288096] env[65758]: DEBUG nova.compute.manager [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Received event network-changed-afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 642.288363] env[65758]: DEBUG nova.compute.manager [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Refreshing instance network info cache due to event network-changed-afb9abca-e097-4678-9ae2-5b3775cf16e9. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 642.288626] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Acquiring lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.288853] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Acquired lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.289196] env[65758]: DEBUG nova.network.neutron [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Refreshing network info cache for port afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 642.348962] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659965, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.482882] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.602211] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659966, 'name': Rename_Task, 'duration_secs': 0.262613} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.602461] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.602724] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b8208ae-c525-4a16-9698-2df21be56223 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.611386] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 642.611386] env[65758]: value = "task-4659967" [ 642.611386] env[65758]: _type = "Task" [ 642.611386] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.625057] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659967, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.684367] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ff40e8-fafd-bf24-4535-233bd8653d0c, 'name': SearchDatastore_Task, 'duration_secs': 0.013054} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.686286] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e52dd4ea-e3a5-4707-b432-5a07ac70f3b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.692809] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.673s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.693478] env[65758]: DEBUG nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 642.697137] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 642.697137] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523fe7e6-72e4-1485-a510-edfcbdc3b23a" [ 642.697137] env[65758]: _type = "Task" [ 642.697137] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.697627] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 14.050s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.703212] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 642.703212] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3443af6b-1666-4c2c-8bc0-becbe7c2520e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.711883] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523fe7e6-72e4-1485-a510-edfcbdc3b23a, 'name': SearchDatastore_Task, 'duration_secs': 0.010543} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.713557] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.713958] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.714162] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 642.714162] env[65758]: value = "task-4659968" [ 642.714162] env[65758]: _type = "Task" [ 642.714162] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.714377] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-316335d0-5522-4f61-ad2e-8c15e6c7fa85 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.726784] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.728425] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 642.728425] env[65758]: value = "task-4659969" [ 642.728425] env[65758]: _type = "Task" [ 642.728425] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.740872] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659969, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.792940] env[65758]: WARNING neutronclient.v2_0.client [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
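Editor's note: the task records above (PowerOnVM_Task task-4659967, PowerOffVM_Task task-4659968, CopyVirtualDisk_Task task-4659969, the repeated "progress is N%" polls and the final "completed successfully" lines) are all produced by oslo.vmware's task-polling loop. A minimal sketch of that pattern follows; the vCenter hostname, credentials and the 'vm-1234' managed-object id are placeholders for illustration, not values taken from this log.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; nova builds an equivalent session
    # from its [vmware] host_ip/host_username/host_password options.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # 'vm-1234' is a hypothetical VirtualMachine moref value.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # Submit the vSphere task, then block while wait_for_task() polls TaskInfo;
    # each poll emits a "progress is N%" DEBUG line like the ones above, and the
    # loop returns once the task reports success (or raises on error).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)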
[ 642.793571] env[65758]: WARNING openstack [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 642.794469] env[65758]: WARNING openstack [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 642.849947] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659965, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.125078] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659967, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.199031] env[65758]: DEBUG nova.compute.utils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 643.205450] env[65758]: DEBUG nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 643.205450] env[65758]: DEBUG nova.network.neutron [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 643.205450] env[65758]: WARNING neutronclient.v2_0.client [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 643.205450] env[65758]: WARNING neutronclient.v2_0.client [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 643.205450] env[65758]: WARNING openstack [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 643.205742] env[65758]: WARNING openstack [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 643.215901] env[65758]: INFO nova.compute.claims [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.239286] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659968, 'name': PowerOffVM_Task, 'duration_secs': 0.269099} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.239286] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 643.239467] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 643.240482] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeaee34d-5156-4ee9-a20e-fa749e913a4a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.249173] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659969, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.255022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 643.255339] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b839f4d8-b040-48c4-89a5-c319ffd1daf7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.337878] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 643.338688] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 643.338688] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleting the datastore file [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 643.338688] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e30bb35d-fcdd-4011-8e94-5d83d7e4a344 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.352140] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659965, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.353843] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 643.353843] env[65758]: value = "task-4659971" [ 643.353843] env[65758]: _type = "Task" [ 643.353843] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.362757] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659971, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.386399] env[65758]: DEBUG nova.network.neutron [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Successfully updated port: b3d6b993-f74a-48db-b23d-102e47b4f09b {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 643.625906] env[65758]: DEBUG oslo_vmware.api [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4659967, 'name': PowerOnVM_Task, 'duration_secs': 0.638135} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.626334] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.626554] env[65758]: INFO nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Took 10.87 seconds to spawn the instance on the hypervisor. [ 643.626730] env[65758]: DEBUG nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 643.627588] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4deeb512-1d28-443c-91b6-348af5554499 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.713660] env[65758]: DEBUG nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 643.722462] env[65758]: INFO nova.compute.resource_tracker [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating resource usage from migration 28721f73-0009-4427-b697-d46294cf6cb7 [ 643.748447] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659969, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726219} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.749955] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.749955] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.750741] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f44aa8cd-7576-4c2a-8780-aba017b4b2e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.764040] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 643.764040] env[65758]: value = "task-4659972" [ 643.764040] env[65758]: _type = "Task" [ 643.764040] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.778576] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659972, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.854976] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659965, 'name': CreateVM_Task, 'duration_secs': 1.760223} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.859640] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 643.863171] env[65758]: WARNING neutronclient.v2_0.client [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
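Editor's note: each "Invoking PropertyCollector.RetrievePropertiesEx" line above is a single property read against a managed object; the "Checking state" step, for instance, reads runtime.powerState for the instance's VM after it is powered on. A hedged sketch of that read with oslo.vmware, using the same hypothetical endpoint and moref as the previous snippet:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Same placeholder endpoint and moref as in the earlier sketch.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # One property read corresponds to one PropertyCollector.RetrievePropertiesEx
    # round trip, which is what each "Invoking PropertyCollector..." line records.
    power_state = session.invoke_api(
        vim_util, 'get_object_property', session.vim, vm_ref, 'runtime.powerState')
    print(power_state)  # e.g. 'poweredOn' once the PowerOnVM_Task has completed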
[ 643.863633] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.863858] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.864294] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 643.865227] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-923598ad-0068-456d-8339-eb126e77145e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.872754] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253886} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.873441] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 643.873738] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 643.874059] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 643.878695] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 643.878695] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526cff72-ed4d-22dd-7db5-8cbb3ae960e4" [ 643.878695] env[65758]: _type = "Task" [ 643.878695] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.898480] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquiring lock "refresh_cache-28ccc013-962d-4607-83a2-5fcd480c27b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.898770] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquired lock "refresh_cache-28ccc013-962d-4607-83a2-5fcd480c27b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.899049] env[65758]: DEBUG nova.network.neutron [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 643.901871] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526cff72-ed4d-22dd-7db5-8cbb3ae960e4, 'name': SearchDatastore_Task, 'duration_secs': 0.010796} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.903686] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.904730] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.909021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.909021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.909021] env[65758]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 643.909021] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39445baa-640b-4364-a64a-c3e14cee08e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.931643] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 643.932142] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 643.933052] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16279e60-3af1-4a95-8138-13a58cce0d6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.943049] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 643.943049] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ac9d88-5d18-602f-683e-5efafec40cc3" [ 643.943049] env[65758]: _type = "Task" [ 643.943049] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.954973] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ac9d88-5d18-602f-683e-5efafec40cc3, 'name': SearchDatastore_Task, 'duration_secs': 0.011937} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.955827] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d65b5b1-a823-40a8-b7f2-73e86348f412 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.970776] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 643.970776] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52724752-6b4e-ffce-3582-91d5ca674243" [ 643.970776] env[65758]: _type = "Task" [ 643.970776] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.984610] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52724752-6b4e-ffce-3582-91d5ca674243, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.130396] env[65758]: DEBUG nova.policy [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '113b998b73e24b4c83a766803f371ed5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f9334e3f62046f491e3defaac1653b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 644.150132] env[65758]: INFO nova.compute.manager [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Took 27.09 seconds to build instance. [ 644.156459] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f261aac-a4ae-472e-a7df-e6de7d276a8c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.173283] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64367f71-10f7-4f42-82fe-379e9320d0be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.209996] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcd20ab-4077-4752-8983-0e32c90e7d3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.222250] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d063a2-20a9-40c1-b8f5-6ad70a19a40a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.239215] env[65758]: DEBUG nova.compute.provider_tree [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.275169] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659972, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080126} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.275441] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.276235] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e14c294-fa01-4488-b4d7-6c871dadefde {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.298647] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.299365] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f19de619-cae3-4d75-97b2-43b85e9e440e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.323201] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 644.323201] env[65758]: value = "task-4659973" [ 644.323201] env[65758]: _type = "Task" [ 644.323201] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.335094] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659973, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.404196] env[65758]: WARNING openstack [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 644.405325] env[65758]: WARNING openstack [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 644.482607] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52724752-6b4e-ffce-3582-91d5ca674243, 'name': SearchDatastore_Task, 'duration_secs': 0.020972} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.482859] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.483124] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 1e249ca9-a7a8-440f-832b-a8f5d84ada8b/1e249ca9-a7a8-440f-832b-a8f5d84ada8b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 644.483376] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5686cd90-a4d8-4680-be1a-3c07be5ebb7c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.491639] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 644.491639] env[65758]: value = "task-4659974" [ 644.491639] env[65758]: _type = "Task" [ 644.491639] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.500807] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.652423] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b26118-8aef-463e-b1b8-97bcd7cfc808 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "f7a14628-cc55-41fa-ae89-3958855df8a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.302s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.728705] env[65758]: DEBUG nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 644.745478] env[65758]: DEBUG nova.scheduler.client.report [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 644.764240] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 644.764500] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
644.764648] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 644.764832] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 644.765047] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 644.765246] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 644.765445] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 644.765561] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 644.765717] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 644.765872] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 644.766047] env[65758]: DEBUG nova.virt.hardware [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 644.766969] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4d4002-235d-41ae-8cd3-b3fffbcd6b1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.778370] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c82263-f05b-4c85-b3ea-64cdcd9592d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.835637] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659973, 'name': ReconfigVM_Task, 'duration_secs': 0.297114} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.836155] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 644.836886] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dad2bc1-67d3-491b-b08f-354d775e99ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.846349] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 644.846349] env[65758]: value = "task-4659975" [ 644.846349] env[65758]: _type = "Task" [ 644.846349] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.859414] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659975, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.920132] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 644.920413] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 644.920566] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 644.920774] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 644.920935] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 644.921370] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 644.921605] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 644.921792] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 
644.922245] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 644.922245] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 644.922375] env[65758]: DEBUG nova.virt.hardware [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 644.923292] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a38dcbc-6d2c-46b1-b220-3f5de5ff966e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.935156] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dcb6b5-91bc-45c5-95a3-30c104a5c696 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.955411] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:66:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc7dd128-390d-4176-b4ab-960fb037bc95', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 644.964582] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 644.964913] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 644.965173] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7da06cc0-a8d0-42f9-8be9-6e43aa9141f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.988360] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 644.988360] env[65758]: value = "task-4659976" [ 644.988360] env[65758]: _type = "Task" [ 644.988360] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.003149] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659974, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.006654] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659976, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.234577] env[65758]: WARNING neutronclient.v2_0.client [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 645.235955] env[65758]: WARNING openstack [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 645.235955] env[65758]: WARNING openstack [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 645.252112] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.555s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.252367] env[65758]: INFO nova.compute.manager [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Migrating [ 645.252606] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.252886] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "compute-rpcapi-router" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.256712] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.512s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.257723] env[65758]: INFO nova.compute.claims [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.365253] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659975, 'name': Rename_Task, 'duration_secs': 0.263392} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.365489] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.365767] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d84ac5f-c2e0-4731-9f10-cdeebfed32e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.368466] env[65758]: DEBUG nova.network.neutron [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Successfully created port: f909dddc-4c03-4424-acfc-d0739864ec6e {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 645.371699] env[65758]: DEBUG nova.network.neutron [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 645.380254] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 645.380254] env[65758]: value = "task-4659977" [ 645.380254] env[65758]: _type = "Task" [ 645.380254] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.390438] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659977, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.443923] env[65758]: WARNING neutronclient.v2_0.client [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 645.444605] env[65758]: WARNING openstack [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 645.444945] env[65758]: WARNING openstack [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 645.514485] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659976, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.518759] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525388} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.519135] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 1e249ca9-a7a8-440f-832b-a8f5d84ada8b/1e249ca9-a7a8-440f-832b-a8f5d84ada8b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 645.519391] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 645.519731] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee208b70-a59e-436d-ad55-1bc768609662 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.529362] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 645.529362] env[65758]: value = "task-4659978" [ 645.529362] env[65758]: _type = "Task" [ 645.529362] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.541207] env[65758]: DEBUG nova.network.neutron [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Updated VIF entry in instance network info cache for port 0e626ecf-0686-4626-9e0c-31a51751b185. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 645.541710] env[65758]: DEBUG nova.network.neutron [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Updating instance_info_cache with network_info: [{"id": "0e626ecf-0686-4626-9e0c-31a51751b185", "address": "fa:16:3e:60:c5:ef", "network": {"id": "3770aad6-39a0-41da-84d1-b6aa69c0dfad", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-982589002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45aad313d10447e9ba61ed0a05b915ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e626ecf-06", "ovs_interfaceid": "0e626ecf-0686-4626-9e0c-31a51751b185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 645.550709] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659978, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.756903] env[65758]: INFO nova.compute.rpcapi [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Automatically selected compute RPC version 6.4 from minimum service version 70 [ 645.760167] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "compute-rpcapi-router" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.896181] env[65758]: DEBUG oslo_vmware.api [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4659977, 'name': PowerOnVM_Task, 'duration_secs': 0.490849} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.896564] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 645.896786] env[65758]: DEBUG nova.compute.manager [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 645.897828] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02211ddc-387d-47a9-9213-72912e105510 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.004986] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659976, 'name': CreateVM_Task, 'duration_secs': 0.654638} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.005844] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 646.006767] env[65758]: WARNING neutronclient.v2_0.client [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 646.007384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.007625] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.008175] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 646.008692] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c33cd098-9856-40eb-8209-d8494d32afcc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.015772] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 
tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 646.015772] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523e2b39-5bf3-960b-40f6-5889b63b8cd7" [ 646.015772] env[65758]: _type = "Task" [ 646.015772] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.028025] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523e2b39-5bf3-960b-40f6-5889b63b8cd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.040405] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659978, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082412} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.040675] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 646.041876] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035e2dfb-a3e6-4344-9093-6578869f2ef5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.046108] env[65758]: DEBUG oslo_concurrency.lockutils [req-a0e2839f-7d6c-4357-9a5a-62b9392dad8f req-ff28dc87-d028-48d1-b064-a81479a24907 service nova] Releasing lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.067075] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 1e249ca9-a7a8-440f-832b-a8f5d84ada8b/1e249ca9-a7a8-440f-832b-a8f5d84ada8b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 646.067445] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25c1843c-fe1a-43cb-9b17-c4f17f1c0d98 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.095481] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 646.095481] env[65758]: value = "task-4659979" [ 646.095481] env[65758]: _type = "Task" [ 646.095481] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.109158] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659979, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.177020] env[65758]: DEBUG nova.network.neutron [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Updated VIF entry in instance network info cache for port afb9abca-e097-4678-9ae2-5b3775cf16e9. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 646.177020] env[65758]: DEBUG nova.network.neutron [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Updating instance_info_cache with network_info: [{"id": "afb9abca-e097-4678-9ae2-5b3775cf16e9", "address": "fa:16:3e:dd:80:22", "network": {"id": "dec9f876-3382-4488-90e2-702f201ed688", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-107507873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d100ba970de24698aff03c4c537b3c18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb9abca-e0", "ovs_interfaceid": "afb9abca-e097-4678-9ae2-5b3775cf16e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 646.277552] env[65758]: WARNING neutronclient.v2_0.client [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 646.278775] env[65758]: WARNING openstack [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 646.279490] env[65758]: WARNING openstack [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 646.293251] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.293479] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.293657] env[65758]: DEBUG nova.network.neutron [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 646.425400] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.453813] env[65758]: DEBUG nova.network.neutron [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Updating instance_info_cache with network_info: [{"id": "b3d6b993-f74a-48db-b23d-102e47b4f09b", "address": "fa:16:3e:7b:95:30", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": 
"nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3d6b993-f7", "ovs_interfaceid": "b3d6b993-f74a-48db-b23d-102e47b4f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 646.528391] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523e2b39-5bf3-960b-40f6-5889b63b8cd7, 'name': SearchDatastore_Task, 'duration_secs': 0.031048} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.529384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.529384] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 646.529384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.529384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.529606] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 646.529746] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30ec1e8f-1aa0-4e8d-9ebe-e660141979d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.544933] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 646.545163] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 646.546025] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81870b63-8947-4417-9615-a2b7dcabfe2d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.559044] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 646.559044] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5250d154-d8a4-f177-2d38-8d92610c791b" [ 646.559044] env[65758]: _type = "Task" [ 646.559044] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.571595] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5250d154-d8a4-f177-2d38-8d92610c791b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.609982] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659979, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.681648] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b8ee568-8c95-4ada-a25c-373cf39dbec4 req-cf0f1af5-8105-41fa-90ce-d73da862a828 service nova] Releasing lock "refresh_cache-e60efbcd-1c4e-40a1-8bc1-893daa511073" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.796734] env[65758]: WARNING neutronclient.v2_0.client [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 646.797677] env[65758]: WARNING openstack [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 646.799632] env[65758]: WARNING openstack [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 646.808429] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3138825a-5533-4819-9f83-b0dfc7243c2d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.820050] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f878d27-d5b2-4516-8666-25a0153497a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.859121] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6dbbdb4-7703-476c-b57a-e3f5a9ab4a48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.870030] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5882da09-0d50-44e8-873d-061acaed8fe0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.893139] env[65758]: DEBUG nova.compute.provider_tree [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.959389] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Releasing lock "refresh_cache-28ccc013-962d-4607-83a2-5fcd480c27b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.959757] env[65758]: DEBUG nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Instance network_info: |[{"id": "b3d6b993-f74a-48db-b23d-102e47b4f09b", "address": "fa:16:3e:7b:95:30", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": 
{"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3d6b993-f7", "ovs_interfaceid": "b3d6b993-f74a-48db-b23d-102e47b4f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 646.960342] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:95:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3d6b993-f74a-48db-b23d-102e47b4f09b', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 646.968218] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Creating folder: Project (cfe0dc92ebaa427fb02d92b1ef37a08c). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 646.968575] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-106b0bc1-77dd-4125-b9d6-88884ad86e54 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.983663] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Created folder: Project (cfe0dc92ebaa427fb02d92b1ef37a08c) in parent group-v909763. [ 646.983907] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Creating folder: Instances. Parent ref: group-v909816. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 646.984284] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-833a653d-f9b2-4114-9710-eb5252473af9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.999980] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Created folder: Instances in parent group-v909816. [ 646.999980] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 646.999980] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 646.999980] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b5b584f-09c3-4c02-a2d6-0f3b6ab6d00d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.023867] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 647.023867] env[65758]: value = "task-4659982" [ 647.023867] env[65758]: _type = "Task" [ 647.023867] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.036957] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659982, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.051369] env[65758]: DEBUG nova.compute.manager [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Received event network-vif-plugged-b3d6b993-f74a-48db-b23d-102e47b4f09b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 647.051369] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Acquiring lock "28ccc013-962d-4607-83a2-5fcd480c27b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.051639] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Lock "28ccc013-962d-4607-83a2-5fcd480c27b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.051763] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Lock "28ccc013-962d-4607-83a2-5fcd480c27b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.051923] env[65758]: DEBUG nova.compute.manager [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] No waiting events found dispatching network-vif-plugged-b3d6b993-f74a-48db-b23d-102e47b4f09b {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 647.052391] env[65758]: WARNING nova.compute.manager [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Received unexpected event network-vif-plugged-b3d6b993-f74a-48db-b23d-102e47b4f09b for instance with vm_state building and task_state spawning. 
[ 647.052391] env[65758]: DEBUG nova.compute.manager [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Received event network-changed-b3d6b993-f74a-48db-b23d-102e47b4f09b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 647.052593] env[65758]: DEBUG nova.compute.manager [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Refreshing instance network info cache due to event network-changed-b3d6b993-f74a-48db-b23d-102e47b4f09b. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 647.052593] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Acquiring lock "refresh_cache-28ccc013-962d-4607-83a2-5fcd480c27b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.052795] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Acquired lock "refresh_cache-28ccc013-962d-4607-83a2-5fcd480c27b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.052836] env[65758]: DEBUG nova.network.neutron [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Refreshing network info cache for port b3d6b993-f74a-48db-b23d-102e47b4f09b {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 647.075096] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5250d154-d8a4-f177-2d38-8d92610c791b, 'name': SearchDatastore_Task, 'duration_secs': 0.019665} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.081773] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-982ad9b9-dae2-4fec-902d-430b4703d55b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.089688] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 647.089688] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc059b-f5a8-5fc2-3f91-833f757e702d" [ 647.089688] env[65758]: _type = "Task" [ 647.089688] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.099718] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc059b-f5a8-5fc2-3f91-833f757e702d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.111339] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659979, 'name': ReconfigVM_Task, 'duration_secs': 0.825141} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.112430] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 1e249ca9-a7a8-440f-832b-a8f5d84ada8b/1e249ca9-a7a8-440f-832b-a8f5d84ada8b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 647.112940] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cda1b29f-f165-40d8-acc4-5e258f0fda07 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.125193] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 647.125193] env[65758]: value = "task-4659983" [ 647.125193] env[65758]: _type = "Task" [ 647.125193] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.143396] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659983, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.232866] env[65758]: WARNING neutronclient.v2_0.client [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 647.233355] env[65758]: WARNING openstack [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 647.233690] env[65758]: WARNING openstack [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 647.256924] env[65758]: DEBUG nova.network.neutron [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Successfully updated port: f909dddc-4c03-4424-acfc-d0739864ec6e {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 647.362859] env[65758]: DEBUG nova.network.neutron [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance_info_cache with network_info: [{"id": "4741e651-cd1e-4ea0-b378-213efedb59d4", "address": "fa:16:3e:9f:a7:58", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4741e651-cd", "ovs_interfaceid": "4741e651-cd1e-4ea0-b378-213efedb59d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 647.395260] env[65758]: DEBUG nova.scheduler.client.report [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 647.536599] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659982, 'name': CreateVM_Task, 'duration_secs': 0.428363} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.536859] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 647.537459] env[65758]: WARNING neutronclient.v2_0.client [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 647.537866] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.538045] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.538396] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 647.538969] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81e799bc-06a2-4870-820b-3dbbf3637fb9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.545106] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 647.545106] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52760dce-31a2-9653-d33a-e4ae7658e616" [ 647.545106] env[65758]: _type = "Task" [ 647.545106] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.556608] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52760dce-31a2-9653-d33a-e4ae7658e616, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.558555] env[65758]: WARNING neutronclient.v2_0.client [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 647.559542] env[65758]: WARNING openstack [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 647.560135] env[65758]: WARNING openstack [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 647.606233] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc059b-f5a8-5fc2-3f91-833f757e702d, 'name': SearchDatastore_Task, 'duration_secs': 0.019071} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.606830] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.607285] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041/0ac196fa-d88c-45a8-999e-8b5216912041.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 647.607699] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20e242e6-61c9-4fab-80d2-65feac551ef6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.616574] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 647.616574] env[65758]: value = "task-4659984" [ 647.616574] env[65758]: _type = "Task" [ 647.616574] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.627972] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.645024] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659983, 'name': Rename_Task, 'duration_secs': 0.224315} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.645024] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 647.645024] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d503ffb8-7a25-4bdb-a358-a2d2d92853ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.655400] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 647.655400] env[65758]: value = "task-4659985" [ 647.655400] env[65758]: _type = "Task" [ 647.655400] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.666404] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659985, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.762949] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquiring lock "refresh_cache-83b637d8-b9fa-4159-b879-c1d737871539" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.762949] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquired lock "refresh_cache-83b637d8-b9fa-4159-b879-c1d737871539" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.762949] env[65758]: DEBUG nova.network.neutron [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 647.823670] env[65758]: WARNING neutronclient.v2_0.client [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 647.824445] env[65758]: WARNING openstack [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 647.824836] env[65758]: WARNING openstack [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 647.865112] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.900920] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.901668] env[65758]: DEBUG nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 647.904665] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.563s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.907521] env[65758]: INFO nova.compute.claims [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.951514] env[65758]: DEBUG nova.network.neutron [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Updated VIF entry in instance network info cache for port b3d6b993-f74a-48db-b23d-102e47b4f09b. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 647.952126] env[65758]: DEBUG nova.network.neutron [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Updating instance_info_cache with network_info: [{"id": "b3d6b993-f74a-48db-b23d-102e47b4f09b", "address": "fa:16:3e:7b:95:30", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3d6b993-f7", "ovs_interfaceid": "b3d6b993-f74a-48db-b23d-102e47b4f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 648.064975] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52760dce-31a2-9653-d33a-e4ae7658e616, 'name': SearchDatastore_Task, 'duration_secs': 0.019198} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.065165] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.065404] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 648.065820] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.065901] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.066274] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 648.066650] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2014e600-3c76-4b2e-a5a6-dff11070c652 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.082354] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 648.082729] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 648.084703] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4731a164-0219-46c5-8db5-98ffd5688e3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.096944] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 648.096944] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5204ecc1-99ec-76af-75f6-30a031a45bed" [ 648.096944] env[65758]: _type = "Task" [ 648.096944] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.121256] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5204ecc1-99ec-76af-75f6-30a031a45bed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.141278] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659984, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.171822] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659985, 'name': PowerOnVM_Task} progress is 76%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.266263] env[65758]: WARNING openstack [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 648.266960] env[65758]: WARNING openstack [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 648.329964] env[65758]: DEBUG nova.network.neutron [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 648.418700] env[65758]: DEBUG nova.compute.utils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 648.425525] env[65758]: DEBUG nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 648.425525] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 648.425525] env[65758]: WARNING neutronclient.v2_0.client [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 648.425525] env[65758]: WARNING neutronclient.v2_0.client [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 648.425886] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 648.426269] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 648.444171] env[65758]: WARNING neutronclient.v2_0.client [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 648.445095] env[65758]: WARNING openstack [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 648.445666] env[65758]: WARNING openstack [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 648.456739] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3f23703-3f36-47d7-80cb-81acae442119 req-c966ef33-0c66-437b-a444-b6782900f07e service nova] Releasing lock "refresh_cache-28ccc013-962d-4607-83a2-5fcd480c27b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.538595] env[65758]: DEBUG nova.policy [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b76c609db8940e3bdcda32d55fa93a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '237226a477354874a363a8670187a1a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 648.611515] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5204ecc1-99ec-76af-75f6-30a031a45bed, 'name': SearchDatastore_Task, 'duration_secs': 0.035481} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.614543] env[65758]: DEBUG nova.network.neutron [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Updating instance_info_cache with network_info: [{"id": "f909dddc-4c03-4424-acfc-d0739864ec6e", "address": "fa:16:3e:eb:79:44", "network": {"id": "2b71473c-7631-4e06-9859-9e0f5bde9089", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1005541126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f9334e3f62046f491e3defaac1653b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf909dddc-4c", "ovs_interfaceid": "f909dddc-4c03-4424-acfc-d0739864ec6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 648.618664] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-674d823d-2610-4dcb-9da6-31a473bc0f19 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.631335] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 648.631335] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f8bb0b-2d8a-da24-dc76-ca73867b1872" [ 648.631335] env[65758]: _type = "Task" [ 648.631335] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.640489] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57774} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.641652] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041/0ac196fa-d88c-45a8-999e-8b5216912041.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 648.643112] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 648.643112] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c70d34b2-3e71-4546-8254-1a233d1194e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.650887] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f8bb0b-2d8a-da24-dc76-ca73867b1872, 'name': SearchDatastore_Task, 'duration_secs': 0.011756} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.651790] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.652128] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 28ccc013-962d-4607-83a2-5fcd480c27b2/28ccc013-962d-4607-83a2-5fcd480c27b2.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 648.652490] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aeeeefe3-0d80-4d92-a43a-29d1942d6998 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.665575] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 648.665575] env[65758]: value = "task-4659986" [ 648.665575] env[65758]: _type = "Task" [ 648.665575] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.665575] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 648.665575] env[65758]: value = "task-4659987" [ 648.665575] env[65758]: _type = "Task" [ 648.665575] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.681381] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659985, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.695084] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "492d1063-8eaf-4207-8d65-341fbc0b6c39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.695419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "492d1063-8eaf-4207-8d65-341fbc0b6c39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.697522] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659986, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.697522] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659987, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.745711] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "03073968-e679-4ce5-9f84-c4765217b308" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.746353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "03073968-e679-4ce5-9f84-c4765217b308" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.784651] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "148eddf4-4c01-47bc-be81-451ca57e7347" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.784863] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "148eddf4-4c01-47bc-be81-451ca57e7347" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.942543] env[65758]: DEBUG nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 649.029576] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Successfully created port: d0a812a9-0121-493b-92c4-16221a927a6a {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 649.106321] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquiring lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.106470] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.120122] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Releasing lock "refresh_cache-83b637d8-b9fa-4159-b879-c1d737871539" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.120470] env[65758]: DEBUG nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Instance network_info: |[{"id": "f909dddc-4c03-4424-acfc-d0739864ec6e", "address": "fa:16:3e:eb:79:44", "network": {"id": "2b71473c-7631-4e06-9859-9e0f5bde9089", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1005541126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f9334e3f62046f491e3defaac1653b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf909dddc-4c", "ovs_interfaceid": "f909dddc-4c03-4424-acfc-d0739864ec6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 649.121228] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 
tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:79:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f909dddc-4c03-4424-acfc-d0739864ec6e', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 649.130170] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Creating folder: Project (5f9334e3f62046f491e3defaac1653b5). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 649.130989] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74f173f6-678a-4b88-b69d-1b14927dfb9b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.151021] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Created folder: Project (5f9334e3f62046f491e3defaac1653b5) in parent group-v909763. [ 649.151021] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Creating folder: Instances. Parent ref: group-v909819. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 649.154275] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99f40444-8f64-46c6-8b05-e46be8edad7a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.170270] env[65758]: DEBUG oslo_vmware.api [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4659985, 'name': PowerOnVM_Task, 'duration_secs': 1.172655} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.181229] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 649.181400] env[65758]: INFO nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Took 11.70 seconds to spawn the instance on the hypervisor. 
[ 649.181649] env[65758]: DEBUG nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 649.182012] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Created folder: Instances in parent group-v909819. [ 649.182246] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 649.186180] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327acc49-1845-42c5-bf7c-33f581d7a012 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.189275] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 649.190457] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2f8a50c-fbd5-4370-a7bc-c36a18d513b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.219186] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 649.222571] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088478} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.222571] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659987, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.225603] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 649.231805] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92a539e-76aa-4b69-ab79-81b245eb1fb4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.236563] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 649.236563] env[65758]: value = "task-4659990" [ 649.236563] env[65758]: _type = "Task" [ 649.236563] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.262358] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041/0ac196fa-d88c-45a8-999e-8b5216912041.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 649.266195] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-265f1f47-301a-4018-b327-fe4ea63ddf57 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.289940] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659990, 'name': CreateVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.299557] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 649.299557] env[65758]: value = "task-4659991" [ 649.299557] env[65758]: _type = "Task" [ 649.299557] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.317390] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659991, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.383220] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fe842a-104c-4659-9115-289bed1822e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.415280] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance '83fa942b-a195-4bcb-9ed5-5bb6764220a4' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 649.555221] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6d4699-c5cf-4ec8-8f09-9f6a6c13d844 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.565897] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8331139a-d8f1-44ee-ba0f-8e81b4e9b057 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.604881] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8749ab-a3b1-4b40-9f97-912ee15f8f38 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.616513] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea24b98-e7cb-4b76-9f2f-80273917587a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.637132] env[65758]: DEBUG nova.compute.provider_tree [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.685594] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659987, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.812013} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.685894] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 28ccc013-962d-4607-83a2-5fcd480c27b2/28ccc013-962d-4607-83a2-5fcd480c27b2.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 649.686226] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 649.686596] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e87e012e-1a32-49dc-a875-1d0521d35199 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.699576] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 649.699576] env[65758]: value = "task-4659992" [ 649.699576] env[65758]: _type = "Task" [ 649.699576] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.712288] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659992, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.747787] env[65758]: INFO nova.compute.manager [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Took 31.56 seconds to build instance. [ 649.753966] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659990, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.756632] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.813921] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659991, 'name': ReconfigVM_Task, 'duration_secs': 0.367278} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.815042] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Successfully created port: 026141b1-3811-4baa-8195-d418fa316270 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 649.817949] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041/0ac196fa-d88c-45a8-999e-8b5216912041.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 649.819361] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ee2f903-3349-4488-bfd8-9145aef3279a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.829623] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 649.829623] env[65758]: value = "task-4659993" [ 649.829623] env[65758]: _type = "Task" [ 649.829623] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.844832] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659993, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.879694] env[65758]: DEBUG nova.compute.manager [req-24a282d2-1b83-4023-9e6f-0532a9006d93 req-fd6199c6-08c7-42e0-9c05-063326825467 service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Received event network-vif-plugged-f909dddc-4c03-4424-acfc-d0739864ec6e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 649.882499] env[65758]: DEBUG oslo_concurrency.lockutils [req-24a282d2-1b83-4023-9e6f-0532a9006d93 req-fd6199c6-08c7-42e0-9c05-063326825467 service nova] Acquiring lock "83b637d8-b9fa-4159-b879-c1d737871539-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.882499] env[65758]: DEBUG oslo_concurrency.lockutils [req-24a282d2-1b83-4023-9e6f-0532a9006d93 req-fd6199c6-08c7-42e0-9c05-063326825467 service nova] Lock "83b637d8-b9fa-4159-b879-c1d737871539-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.882499] env[65758]: DEBUG oslo_concurrency.lockutils [req-24a282d2-1b83-4023-9e6f-0532a9006d93 req-fd6199c6-08c7-42e0-9c05-063326825467 service nova] Lock "83b637d8-b9fa-4159-b879-c1d737871539-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.882499] env[65758]: DEBUG nova.compute.manager [req-24a282d2-1b83-4023-9e6f-0532a9006d93 req-fd6199c6-08c7-42e0-9c05-063326825467 service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] No waiting events found dispatching network-vif-plugged-f909dddc-4c03-4424-acfc-d0739864ec6e {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 649.882499] env[65758]: WARNING nova.compute.manager [req-24a282d2-1b83-4023-9e6f-0532a9006d93 req-fd6199c6-08c7-42e0-9c05-063326825467 service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Received unexpected event network-vif-plugged-f909dddc-4c03-4424-acfc-d0739864ec6e for instance with vm_state building and task_state spawning. [ 649.921548] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 649.921548] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd212693-bd1c-48d3-900b-dcf48ea5a6cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.936061] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 649.936061] env[65758]: value = "task-4659994" [ 649.936061] env[65758]: _type = "Task" [ 649.936061] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.948603] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659994, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.955383] env[65758]: DEBUG nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 649.989360] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 649.990174] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 649.990174] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 649.990174] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 649.990174] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 649.990174] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 649.990442] env[65758]: DEBUG nova.virt.hardware [None 
req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.990442] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 649.990690] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 649.990919] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 649.991134] env[65758]: DEBUG nova.virt.hardware [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 649.992008] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92674832-cadf-4179-a9c2-0cfda434bfe0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.004019] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766920e6-a491-47e3-87bb-bd852d470253 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.140780] env[65758]: DEBUG nova.scheduler.client.report [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 650.166174] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Successfully created port: acba5b06-e536-4848-be4e-db877af4d6ac {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 650.211547] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 
tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659992, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074455} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.211877] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 650.212844] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e531de-dca5-4191-b007-5267da689fc0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.241452] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 28ccc013-962d-4607-83a2-5fcd480c27b2/28ccc013-962d-4607-83a2-5fcd480c27b2.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 650.242242] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dfdaded-ea40-4122-817e-5b83bca0e1ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.260170] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a06286f0-79ad-4dca-af7b-ff3c22061dd2 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.141s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.271720] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659990, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.273338] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 650.273338] env[65758]: value = "task-4659995" [ 650.273338] env[65758]: _type = "Task" [ 650.273338] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.282802] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659995, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.341032] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659993, 'name': Rename_Task, 'duration_secs': 0.152553} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.341863] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 650.341863] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f5565d4-a9b8-4912-95bc-16b9c6841a7e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.349392] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 650.349392] env[65758]: value = "task-4659996" [ 650.349392] env[65758]: _type = "Task" [ 650.349392] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.358968] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.431950] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3473d791-8eb2-4e23-9982-5203a40f60bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.444078] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b380a436-6c78-4499-b10c-030c63bd1312 tempest-ServersAdminNegativeTestJSON-892396116 tempest-ServersAdminNegativeTestJSON-892396116-project-admin] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Suspending the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 650.446492] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-4e7e5ff6-7100-4a6b-b904-d57843a7909b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.448478] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659994, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.455313] env[65758]: DEBUG oslo_vmware.api [None req-b380a436-6c78-4499-b10c-030c63bd1312 tempest-ServersAdminNegativeTestJSON-892396116 tempest-ServersAdminNegativeTestJSON-892396116-project-admin] Waiting for the task: (returnval){ [ 650.455313] env[65758]: value = "task-4659997" [ 650.455313] env[65758]: _type = "Task" [ 650.455313] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.464641] env[65758]: DEBUG oslo_vmware.api [None req-b380a436-6c78-4499-b10c-030c63bd1312 tempest-ServersAdminNegativeTestJSON-892396116 tempest-ServersAdminNegativeTestJSON-892396116-project-admin] Task: {'id': task-4659997, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.646667] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.647683] env[65758]: DEBUG nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 650.650638] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.868s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.652816] env[65758]: INFO nova.compute.claims [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.767164] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 650.777157] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659990, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.789755] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659995, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.870550] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659996, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.948301] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659994, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.966466] env[65758]: DEBUG oslo_vmware.api [None req-b380a436-6c78-4499-b10c-030c63bd1312 tempest-ServersAdminNegativeTestJSON-892396116 tempest-ServersAdminNegativeTestJSON-892396116-project-admin] Task: {'id': task-4659997, 'name': SuspendVM_Task} progress is 54%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.161567] env[65758]: DEBUG nova.compute.utils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 651.163393] env[65758]: DEBUG nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 651.164780] env[65758]: DEBUG nova.network.neutron [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 651.164780] env[65758]: WARNING neutronclient.v2_0.client [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 651.164780] env[65758]: WARNING neutronclient.v2_0.client [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 651.165508] env[65758]: WARNING openstack [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 651.166067] env[65758]: WARNING openstack [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 651.275325] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4659990, 'name': CreateVM_Task, 'duration_secs': 1.585697} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.275325] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 651.275325] env[65758]: WARNING neutronclient.v2_0.client [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 651.275325] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.275325] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.275562] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 651.275562] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed98c070-2cbd-4f61-b4d2-c52fd808bf09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.286326] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 651.286326] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520ead74-7c6f-1cf7-c8f3-c135f86199b8" [ 651.286326] env[65758]: _type = "Task" [ 651.286326] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.296841] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659995, 'name': ReconfigVM_Task, 'duration_secs': 0.557208} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.297397] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 28ccc013-962d-4607-83a2-5fcd480c27b2/28ccc013-962d-4607-83a2-5fcd480c27b2.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 651.298802] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f88d091e-2f58-45d4-a8dc-1cf5c50d9bbf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.305930] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520ead74-7c6f-1cf7-c8f3-c135f86199b8, 'name': SearchDatastore_Task, 'duration_secs': 0.012155} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.306848] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.308390] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.308626] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.308849] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.308991] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
651.309180] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 651.309508] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 651.309508] env[65758]: value = "task-4659998" [ 651.309508] env[65758]: _type = "Task" [ 651.309508] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.309730] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7eb6d0a7-f7ef-4420-a060-059e9d2459f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.321492] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659998, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.323386] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 651.323655] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 651.324444] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-241f1d42-c32d-4a37-8fdb-7c4138592881 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.331239] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 651.331239] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52cbf8f2-b7b2-7c3c-a8af-a9e5cc83e1b0" [ 651.331239] env[65758]: _type = "Task" [ 651.331239] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.342986] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52cbf8f2-b7b2-7c3c-a8af-a9e5cc83e1b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.361521] env[65758]: DEBUG oslo_vmware.api [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4659996, 'name': PowerOnVM_Task, 'duration_secs': 0.640841} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.361770] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 651.361970] env[65758]: DEBUG nova.compute.manager [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 651.363062] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ab3c76-c2b5-40d1-bd0a-4f7775ff4aba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.443289] env[65758]: DEBUG nova.policy [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a497ea02c1e4e87bbf0b6ac4e956081', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65a13c957f3f4521ba7862cf3de6c0c4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 651.452454] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4659994, 'name': PowerOffVM_Task, 'duration_secs': 1.279038} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.452777] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.452967] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance '83fa942b-a195-4bcb-9ed5-5bb6764220a4' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 651.467300] env[65758]: DEBUG oslo_vmware.api [None req-b380a436-6c78-4499-b10c-030c63bd1312 tempest-ServersAdminNegativeTestJSON-892396116 tempest-ServersAdminNegativeTestJSON-892396116-project-admin] Task: {'id': task-4659997, 'name': SuspendVM_Task, 'duration_secs': 0.931395} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.467612] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b380a436-6c78-4499-b10c-030c63bd1312 tempest-ServersAdminNegativeTestJSON-892396116 tempest-ServersAdminNegativeTestJSON-892396116-project-admin] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Suspended the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 651.467799] env[65758]: DEBUG nova.compute.manager [None req-b380a436-6c78-4499-b10c-030c63bd1312 tempest-ServersAdminNegativeTestJSON-892396116 tempest-ServersAdminNegativeTestJSON-892396116-project-admin] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 651.468693] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfc912d-496b-4e94-8e69-2b276302b93b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.680561] env[65758]: DEBUG nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 651.831715] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659998, 'name': Rename_Task, 'duration_secs': 0.263237} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.833376] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 651.836566] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05e724b4-efdf-4d4d-9cf6-850b28fa5166 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.846441] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52cbf8f2-b7b2-7c3c-a8af-a9e5cc83e1b0, 'name': SearchDatastore_Task, 'duration_secs': 0.015735} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.848999] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 651.848999] env[65758]: value = "task-4659999" [ 651.848999] env[65758]: _type = "Task" [ 651.848999] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.849459] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d189e484-09c2-4afa-9f76-26db81733411 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.862036] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Successfully updated port: d0a812a9-0121-493b-92c4-16221a927a6a {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 651.867920] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 651.867920] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52483ac8-28c2-c781-c8c8-176bf5ee8a16" [ 651.867920] env[65758]: _type = "Task" [ 651.867920] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.892141] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.892830] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52483ac8-28c2-c781-c8c8-176bf5ee8a16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.964895] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 651.965422] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 651.965422] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 651.965709] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 651.965851] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 651.966115] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 651.966442] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 
tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 651.966688] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 651.966948] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 651.967229] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 651.967501] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 651.979691] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2efad3c1-5087-4ed4-8d73-d4e9b2837038 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.002099] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 652.002099] env[65758]: value = "task-4660000" [ 652.002099] env[65758]: _type = "Task" [ 652.002099] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.017281] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660000, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.174994] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640654e5-ccac-4008-ab4d-a7dee0eac09a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.189739] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a10e36-9fe9-4e41-aaaf-b37e0892fd22 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.231036] env[65758]: DEBUG nova.network.neutron [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Successfully created port: acfd4b22-8660-461e-9e71-fa7134a7936e {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 652.235527] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83329101-9cec-4a57-8c9b-7f947e04489c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.247420] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc3df96-be89-4003-986f-9195ca16aa20 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.266219] env[65758]: DEBUG nova.compute.provider_tree [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.363905] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659999, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.383357] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52483ac8-28c2-c781-c8c8-176bf5ee8a16, 'name': SearchDatastore_Task, 'duration_secs': 0.044803} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.383470] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.383715] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 83b637d8-b9fa-4159-b879-c1d737871539/83b637d8-b9fa-4159-b879-c1d737871539.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 652.383955] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2011fc8a-1ae4-4478-bb6f-b961e9988002 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.394112] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 652.394112] env[65758]: value = "task-4660001" [ 652.394112] env[65758]: _type = "Task" [ 652.394112] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.403742] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.515785] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660000, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.697875] env[65758]: DEBUG nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 652.733903] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 652.734155] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.734301] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 652.734464] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.734609] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 652.734725] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 652.734957] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 652.737024] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 652.737227] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 652.737651] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 652.737866] env[65758]: DEBUG nova.virt.hardware [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 652.739310] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90432b2f-2fc9-468a-9a39-22a0a57c29f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.751872] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9ba316-db3e-4d8b-a1d8-779f43c66ac2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.769998] env[65758]: DEBUG nova.scheduler.client.report [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.866331] env[65758]: DEBUG oslo_vmware.api [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4659999, 'name': PowerOnVM_Task, 'duration_secs': 0.919872} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.866652] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 652.866969] env[65758]: INFO nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Took 10.78 seconds to spawn the instance on the hypervisor. 
[ 652.869551] env[65758]: DEBUG nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 652.870726] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0756407f-1b28-477d-ab69-c801d57bf9fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.907331] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660001, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.016146] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660000, 'name': ReconfigVM_Task, 'duration_secs': 0.517908} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.016467] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance '83fa942b-a195-4bcb-9ed5-5bb6764220a4' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 653.275450] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.275979] env[65758]: DEBUG nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 653.278662] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.690s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.278919] env[65758]: DEBUG nova.objects.instance [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lazy-loading 'resources' on Instance uuid 9e16d31b-e84c-448b-9d83-98cac49570a0 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 653.391696] env[65758]: INFO nova.compute.manager [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Took 28.07 seconds to build instance. [ 653.408391] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.908534} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.409597] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 83b637d8-b9fa-4159-b879-c1d737871539/83b637d8-b9fa-4159-b879-c1d737871539.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 653.409895] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 653.410270] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffa2197d-e90e-416f-b8a7-c350f55e4556 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.419845] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 653.419845] env[65758]: value = "task-4660002" [ 653.419845] env[65758]: _type = "Task" [ 653.419845] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.432053] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660002, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.523483] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 653.523757] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.523962] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 653.524155] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.524295] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 653.524432] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 653.524626] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 653.524796] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 653.525034] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Got 1 possible topologies {{(pid=65758) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 653.525208] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 653.525418] env[65758]: DEBUG nova.virt.hardware [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 653.531425] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Reconfiguring VM instance instance-00000007 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 653.531791] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7f848cc-68a4-484f-bf05-ea5b6a07f139 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.554396] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 653.554396] env[65758]: value = "task-4660003" [ 653.554396] env[65758]: _type = "Task" [ 653.554396] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.564850] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660003, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.785429] env[65758]: DEBUG nova.compute.utils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 653.786960] env[65758]: DEBUG nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 653.787177] env[65758]: DEBUG nova.network.neutron [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 653.787527] env[65758]: WARNING neutronclient.v2_0.client [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 653.787833] env[65758]: WARNING neutronclient.v2_0.client [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 653.788816] env[65758]: WARNING openstack [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 653.789211] env[65758]: WARNING openstack [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 653.894301] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1df8a2c-8bb7-446c-b732-6cc9d52b86ac tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "28ccc013-962d-4607-83a2-5fcd480c27b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.418s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.934997] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079056} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.938728] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.940193] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4653f4a-c057-40ad-a3d3-338382ebd220 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.966261] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 83b637d8-b9fa-4159-b879-c1d737871539/83b637d8-b9fa-4159-b879-c1d737871539.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.969764] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef1e1605-7ff7-4f42-81dd-1fe32d1f8a23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.994385] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 653.994385] env[65758]: value = "task-4660004" [ 653.994385] env[65758]: _type = "Task" [ 653.994385] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.013465] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660004, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.068097] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660003, 'name': ReconfigVM_Task, 'duration_secs': 0.337767} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.075033] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Reconfigured VM instance instance-00000007 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 654.075033] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c8cca1-d08b-4a8f-9338-44b0ce9b63d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.100331] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 83fa942b-a195-4bcb-9ed5-5bb6764220a4/83fa942b-a195-4bcb-9ed5-5bb6764220a4.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 654.103466] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbf90db7-57fa-4b97-808f-adfa38d0e10f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.126247] env[65758]: DEBUG nova.network.neutron [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Successfully updated port: acfd4b22-8660-461e-9e71-fa7134a7936e {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 654.135316] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 654.135316] env[65758]: value = "task-4660005" [ 654.135316] env[65758]: _type = "Task" [ 654.135316] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.150314] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660005, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.265096] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Successfully updated port: 026141b1-3811-4baa-8195-d418fa316270 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 654.297667] env[65758]: DEBUG nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 654.316335] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e434fc5-183f-47a2-8bf3-ab5cda941c59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.331804] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560265f4-c41e-4c2f-87fa-cb76f69b2634 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.372662] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60eb9fc8-72ef-422b-9314-bc861c80ef02 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.381836] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22b605d-e474-44e0-89a8-ca240a470961 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.399130] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 654.404037] env[65758]: DEBUG nova.compute.provider_tree [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 654.505560] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660004, 'name': ReconfigVM_Task, 'duration_secs': 0.310097} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.505839] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 83b637d8-b9fa-4159-b879-c1d737871539/83b637d8-b9fa-4159-b879-c1d737871539.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.506818] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d9ea9e8-cdb8-45f5-af20-fc095e25c177 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.514347] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 654.514347] env[65758]: value = "task-4660006" [ 654.514347] env[65758]: _type = "Task" [ 654.514347] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.524427] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660006, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.629295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquiring lock "refresh_cache-a2010738-759b-480a-8360-2639788056b1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.629525] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquired lock "refresh_cache-a2010738-759b-480a-8360-2639788056b1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.629705] env[65758]: DEBUG nova.network.neutron [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 654.647149] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660005, 'name': ReconfigVM_Task, 'duration_secs': 0.366746} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.647817] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 83fa942b-a195-4bcb-9ed5-5bb6764220a4/83fa942b-a195-4bcb-9ed5-5bb6764220a4.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.647817] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance '83fa942b-a195-4bcb-9ed5-5bb6764220a4' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 654.935105] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.938245] env[65758]: ERROR nova.scheduler.client.report [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] [req-21e7b5c6-d58d-4dd4-b4e0-c9ac6d9c579f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21e7b5c6-d58d-4dd4-b4e0-c9ac6d9c579f"}]} [ 654.958502] env[65758]: DEBUG nova.scheduler.client.report [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 654.981948] env[65758]: DEBUG nova.scheduler.client.report [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 654.982201] env[65758]: DEBUG nova.compute.provider_tree [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 654.995026] env[65758]: DEBUG nova.scheduler.client.report [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: None {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 655.019720] env[65758]: DEBUG nova.scheduler.client.report [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 655.034748] env[65758]: DEBUG nova.policy [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'beef1a3668014b1681b54a695325f0e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'9ca73ea9954543e38b16a12b37d531c6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 655.040206] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660006, 'name': Rename_Task, 'duration_secs': 0.155403} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.040702] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.043169] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8f17a11-3c01-4cd1-b81a-48c784b4e579 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.051509] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 655.051509] env[65758]: value = "task-4660007" [ 655.051509] env[65758]: _type = "Task" [ 655.051509] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.065342] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660007, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.136935] env[65758]: WARNING openstack [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 655.138281] env[65758]: WARNING openstack [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 655.156110] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63811781-9117-4117-ab26-287d01d20687 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.188196] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837d1f83-bbae-4ecf-9670-ab70aa87a8e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.209806] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance '83fa942b-a195-4bcb-9ed5-5bb6764220a4' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 655.310238] env[65758]: DEBUG nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 655.345700] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 655.345857] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.346079] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 655.346245] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.346743] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 655.346743] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 655.346743] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.346939] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 655.347093] 
env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 655.347167] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 655.347330] env[65758]: DEBUG nova.virt.hardware [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 655.348320] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48130cc-c763-450f-b79d-328df239d9cd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.362057] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d8386e-84e1-4ebb-9fe2-21281f89cceb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.504581] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13554698-de83-492b-99c0-ed3c662d1c07 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.513144] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9df2fe-2a62-4804-8695-b8a107f4f686 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.552032] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8a5f3f-3692-40d8-86c4-0e358edb4887 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.562293] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb5404d-0bc8-46be-8c60-7d8480ec61e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.571043] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660007, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.582561] env[65758]: DEBUG nova.compute.provider_tree [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 655.721148] env[65758]: WARNING neutronclient.v2_0.client [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 655.721708] env[65758]: WARNING neutronclient.v2_0.client [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 656.070846] env[65758]: DEBUG oslo_vmware.api [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660007, 'name': PowerOnVM_Task, 'duration_secs': 0.748564} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.070846] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.071647] env[65758]: INFO nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Took 11.34 seconds to spawn the instance on the hypervisor. 
[ 656.071647] env[65758]: DEBUG nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 656.073678] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c06f1e7-c5ca-4ed2-9fb5-9154ede6e031 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.129260] env[65758]: DEBUG nova.scheduler.client.report [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 37 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 656.132137] env[65758]: DEBUG nova.compute.provider_tree [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 37 to 38 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 656.132137] env[65758]: DEBUG nova.compute.provider_tree [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 656.331281] env[65758]: DEBUG nova.network.neutron [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 656.365583] env[65758]: DEBUG nova.network.neutron [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Port 4741e651-cd1e-4ea0-b378-213efedb59d4 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 656.597054] env[65758]: INFO nova.compute.manager [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Took 30.50 seconds to build instance. [ 656.641885] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.363s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.644958] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.687s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.647708] env[65758]: INFO nova.compute.claims [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.651700] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Successfully updated port: acba5b06-e536-4848-be4e-db877af4d6ac {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 656.688137] env[65758]: INFO nova.scheduler.client.report [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Deleted allocations for instance 9e16d31b-e84c-448b-9d83-98cac49570a0 [ 656.765400] env[65758]: DEBUG nova.network.neutron [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Successfully created port: 872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 656.850017] env[65758]: WARNING neutronclient.v2_0.client [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 656.850732] env[65758]: WARNING openstack [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 656.851138] env[65758]: WARNING openstack [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 656.991487] env[65758]: DEBUG nova.network.neutron [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Updating instance_info_cache with network_info: [{"id": "acfd4b22-8660-461e-9e71-fa7134a7936e", "address": "fa:16:3e:cd:71:71", "network": {"id": "b8727d6d-5390-46a5-8f17-d072022f93b9", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-137815059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "65a13c957f3f4521ba7862cf3de6c0c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacfd4b22-86", "ovs_interfaceid": "acfd4b22-8660-461e-9e71-fa7134a7936e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 657.102618] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6cf29adb-d9d4-43aa-94d1-04eafbc16d56 tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "83b637d8-b9fa-4159-b879-c1d737871539" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.021s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.157604] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.157847] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 
tempest-ServersTestMultiNic-1126327395-project-member] Acquired lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.158090] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 657.198130] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26220ab0-75ab-4a13-848e-b51605af127b tempest-ServerDiagnosticsV248Test-865679339 tempest-ServerDiagnosticsV248Test-865679339-project-member] Lock "9e16d31b-e84c-448b-9d83-98cac49570a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.067s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.389378] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.389615] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.389783] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.497683] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Releasing lock "refresh_cache-a2010738-759b-480a-8360-2639788056b1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.498077] env[65758]: DEBUG nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Instance network_info: |[{"id": "acfd4b22-8660-461e-9e71-fa7134a7936e", "address": "fa:16:3e:cd:71:71", "network": {"id": "b8727d6d-5390-46a5-8f17-d072022f93b9", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-137815059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "65a13c957f3f4521ba7862cf3de6c0c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacfd4b22-86", "ovs_interfaceid": "acfd4b22-8660-461e-9e71-fa7134a7936e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 657.498562] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:71:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2c68e7-b690-42e2-9491-c3f9357cc66a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acfd4b22-8660-461e-9e71-fa7134a7936e', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 657.508155] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Creating folder: Project (65a13c957f3f4521ba7862cf3de6c0c4). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.508155] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-778cc581-9e5a-40e8-8006-d8e86f5812c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.521192] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Created folder: Project (65a13c957f3f4521ba7862cf3de6c0c4) in parent group-v909763. [ 657.521413] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Creating folder: Instances. Parent ref: group-v909822. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.521661] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5621608-9e79-4ef5-92c6-642366909b01 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.533540] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Created folder: Instances in parent group-v909822. 
[ 657.533805] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 657.534127] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2010738-759b-480a-8360-2639788056b1] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 657.534372] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9ced3eb-d6b3-4a3a-ba26-c8b704a386ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.556119] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 657.556119] env[65758]: value = "task-4660010" [ 657.556119] env[65758]: _type = "Task" [ 657.556119] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.565387] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660010, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.605787] env[65758]: DEBUG nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 657.665352] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 657.665695] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 657.777468] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 657.966051] env[65758]: WARNING neutronclient.v2_0.client [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 657.966051] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 657.966051] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.073035] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660010, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.129273] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.141617] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d6489a-6096-4ee6-80bc-6d9ef0c200cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.152331] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc552ca7-ada4-4dc4-9b9e-c249e4183aef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.183525] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c1d365-d746-4bbb-8774-08bd0f68ecaa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.192947] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebd9a7b-82c7-4035-ab17-d461bf4b4f08 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.207806] env[65758]: DEBUG nova.compute.provider_tree [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.220743] env[65758]: WARNING neutronclient.v2_0.client [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 658.221474] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.221843] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.276770] env[65758]: DEBUG nova.compute.manager [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Received event network-changed-f909dddc-4c03-4424-acfc-d0739864ec6e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 658.278330] env[65758]: DEBUG nova.compute.manager [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Refreshing instance network info cache due to event network-changed-f909dddc-4c03-4424-acfc-d0739864ec6e. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 658.278330] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Acquiring lock "refresh_cache-83b637d8-b9fa-4159-b879-c1d737871539" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.278330] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Acquired lock "refresh_cache-83b637d8-b9fa-4159-b879-c1d737871539" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.278330] env[65758]: DEBUG nova.network.neutron [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Refreshing network info cache for port f909dddc-4c03-4424-acfc-d0739864ec6e {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 658.393959] env[65758]: WARNING neutronclient.v2_0.client [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 658.438242] env[65758]: DEBUG nova.network.neutron [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Successfully updated port: 872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 658.509139] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.509344] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.509521] env[65758]: DEBUG nova.network.neutron [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 658.511105] env[65758]: INFO nova.compute.manager [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Rebuilding instance [ 658.570355] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660010, 'name': CreateVM_Task, 'duration_secs': 0.588926} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.570355] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2010738-759b-480a-8360-2639788056b1] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 658.571649] env[65758]: WARNING neutronclient.v2_0.client [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 658.571649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.571649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.572050] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 658.575433] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-959d7d19-8817-43b4-b583-f5736b52bc75 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.580429] env[65758]: DEBUG nova.compute.manager [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 658.581304] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a076b7-db73-4a99-9afd-575b0df0e0be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.586560] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 658.586560] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b8a8f8-39ea-170c-5cf8-fea2a826b07d" [ 658.586560] env[65758]: _type = "Task" [ 658.586560] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.602477] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b8a8f8-39ea-170c-5cf8-fea2a826b07d, 'name': SearchDatastore_Task, 'duration_secs': 0.011236} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.602779] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.603040] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 658.603292] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.603443] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.603623] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 658.603904] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e37176e6-5254-4965-892c-2cacc54a06b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.614863] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 658.615071] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 658.615841] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-754b2647-8cd0-4f39-b00d-9da73120f3e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.622315] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 658.622315] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529b7b3d-c505-fe34-2005-63631bad2609" [ 658.622315] env[65758]: _type = "Task" [ 658.622315] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.631848] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529b7b3d-c505-fe34-2005-63631bad2609, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.633780] env[65758]: WARNING neutronclient.v2_0.client [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 658.634504] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.635705] env[65758]: WARNING openstack [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.699418] env[65758]: DEBUG nova.compute.manager [req-8314bca3-c4f6-4980-8c15-00cfdc62241e req-328d4e51-0094-4031-9d73-1d00d87a6f27 service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-vif-plugged-026141b1-3811-4baa-8195-d418fa316270 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 658.699555] env[65758]: DEBUG oslo_concurrency.lockutils [req-8314bca3-c4f6-4980-8c15-00cfdc62241e req-328d4e51-0094-4031-9d73-1d00d87a6f27 service nova] Acquiring lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.700091] env[65758]: DEBUG oslo_concurrency.lockutils [req-8314bca3-c4f6-4980-8c15-00cfdc62241e req-328d4e51-0094-4031-9d73-1d00d87a6f27 service nova] Lock 
"483765b5-c63c-4aac-9082-519bbc4e6eb5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.700366] env[65758]: DEBUG oslo_concurrency.lockutils [req-8314bca3-c4f6-4980-8c15-00cfdc62241e req-328d4e51-0094-4031-9d73-1d00d87a6f27 service nova] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.700738] env[65758]: DEBUG nova.compute.manager [req-8314bca3-c4f6-4980-8c15-00cfdc62241e req-328d4e51-0094-4031-9d73-1d00d87a6f27 service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] No waiting events found dispatching network-vif-plugged-026141b1-3811-4baa-8195-d418fa316270 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 658.700738] env[65758]: WARNING nova.compute.manager [req-8314bca3-c4f6-4980-8c15-00cfdc62241e req-328d4e51-0094-4031-9d73-1d00d87a6f27 service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received unexpected event network-vif-plugged-026141b1-3811-4baa-8195-d418fa316270 for instance with vm_state building and task_state spawning. [ 658.711270] env[65758]: DEBUG nova.scheduler.client.report [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 658.780792] env[65758]: WARNING neutronclient.v2_0.client [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 658.781768] env[65758]: WARNING openstack [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 658.782319] env[65758]: WARNING openstack [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 658.903853] env[65758]: DEBUG nova.network.neutron [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Updating instance_info_cache with network_info: [{"id": "d0a812a9-0121-493b-92c4-16221a927a6a", "address": "fa:16:3e:92:b8:aa", "network": {"id": "5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a812a9-01", "ovs_interfaceid": "d0a812a9-0121-493b-92c4-16221a927a6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "026141b1-3811-4baa-8195-d418fa316270", "address": "fa:16:3e:f7:60:d4", "network": {"id": "bfd8cc95-fa08-4bc9-976c-adfbd4c45ea9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2004041920", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.115", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap026141b1-38", "ovs_interfaceid": "026141b1-3811-4baa-8195-d418fa316270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "acba5b06-e536-4848-be4e-db877af4d6ac", "address": "fa:16:3e:11:b1:da", "network": {"id": 
"5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacba5b06-e5", "ovs_interfaceid": "acba5b06-e536-4848-be4e-db877af4d6ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 658.941935] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquiring lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.942282] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquired lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.942531] env[65758]: DEBUG nova.network.neutron [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 659.014861] env[65758]: WARNING neutronclient.v2_0.client [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 659.015633] env[65758]: WARNING openstack [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.015992] env[65758]: WARNING openstack [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.138107] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529b7b3d-c505-fe34-2005-63631bad2609, 'name': SearchDatastore_Task, 'duration_secs': 0.011676} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.139080] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75288ada-6351-42b3-809d-a35fa62562c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.145769] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 659.145769] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ecb070-8b3f-ffb3-d140-fe6e8f975e39" [ 659.145769] env[65758]: _type = "Task" [ 659.145769] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.158735] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ecb070-8b3f-ffb3-d140-fe6e8f975e39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.212607] env[65758]: WARNING neutronclient.v2_0.client [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 659.213297] env[65758]: WARNING openstack [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.213653] env[65758]: WARNING openstack [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.227322] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.228026] env[65758]: DEBUG nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 659.231610] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.675s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.231852] env[65758]: DEBUG nova.objects.instance [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lazy-loading 'resources' on Instance uuid 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 659.346527] env[65758]: DEBUG nova.network.neutron [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Updated VIF entry in instance network info cache for port f909dddc-4c03-4424-acfc-d0739864ec6e. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 659.346902] env[65758]: DEBUG nova.network.neutron [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Updating instance_info_cache with network_info: [{"id": "f909dddc-4c03-4424-acfc-d0739864ec6e", "address": "fa:16:3e:eb:79:44", "network": {"id": "2b71473c-7631-4e06-9859-9e0f5bde9089", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1005541126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f9334e3f62046f491e3defaac1653b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf909dddc-4c", "ovs_interfaceid": "f909dddc-4c03-4424-acfc-d0739864ec6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 659.374812] env[65758]: WARNING neutronclient.v2_0.client [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 659.375675] env[65758]: WARNING openstack [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.376241] env[65758]: WARNING openstack [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.408841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Releasing lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.409789] env[65758]: DEBUG nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Instance network_info: |[{"id": "d0a812a9-0121-493b-92c4-16221a927a6a", "address": "fa:16:3e:92:b8:aa", "network": {"id": "5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a812a9-01", "ovs_interfaceid": "d0a812a9-0121-493b-92c4-16221a927a6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "026141b1-3811-4baa-8195-d418fa316270", "address": "fa:16:3e:f7:60:d4", "network": {"id": "bfd8cc95-fa08-4bc9-976c-adfbd4c45ea9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2004041920", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.115", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": 
"nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap026141b1-38", "ovs_interfaceid": "026141b1-3811-4baa-8195-d418fa316270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "acba5b06-e536-4848-be4e-db877af4d6ac", "address": "fa:16:3e:11:b1:da", "network": {"id": "5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacba5b06-e5", "ovs_interfaceid": "acba5b06-e536-4848-be4e-db877af4d6ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 659.410566] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:b8:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0a812a9-0121-493b-92c4-16221a927a6a', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:60:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '026141b1-3811-4baa-8195-d418fa316270', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:b1:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acba5b06-e536-4848-be4e-db877af4d6ac', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 659.421936] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Creating folder: Project (237226a477354874a363a8670187a1a9). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.422361] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6bda229-52b6-407f-ab57-603f78077707 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.438731] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Created folder: Project (237226a477354874a363a8670187a1a9) in parent group-v909763. [ 659.438731] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Creating folder: Instances. Parent ref: group-v909825. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.438731] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7adb095-9d91-4016-98e2-0651b1b1fc31 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.448497] env[65758]: WARNING openstack [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.449416] env[65758]: WARNING openstack [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.459981] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Created folder: Instances in parent group-v909825. [ 659.460254] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 659.460453] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 659.460668] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1ea5152-d7f8-42cf-bb5e-fd548c3412e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.494337] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.494337] env[65758]: value = "task-4660013" [ 659.494337] env[65758]: _type = "Task" [ 659.494337] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.500120] env[65758]: DEBUG nova.network.neutron [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 659.509043] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660013, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.537429] env[65758]: DEBUG nova.network.neutron [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance_info_cache with network_info: [{"id": "4741e651-cd1e-4ea0-b378-213efedb59d4", "address": "fa:16:3e:9f:a7:58", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4741e651-cd", "ovs_interfaceid": "4741e651-cd1e-4ea0-b378-213efedb59d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 659.604965] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 659.604965] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bdfbbd8-eb94-4c39-924d-a0115a689009 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.613809] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 659.613809] env[65758]: value = "task-4660014" [ 659.613809] env[65758]: _type = "Task" [ 659.613809] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.629661] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660014, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.639051] env[65758]: DEBUG nova.compute.manager [None req-b8f19fb2-1914-498a-b74f-c5d4a7b2e3b0 tempest-ServerDiagnosticsTest-330201739 tempest-ServerDiagnosticsTest-330201739-project-admin] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 659.640562] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b47e63-90ac-4b51-978a-a073c4230537 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.644671] env[65758]: WARNING neutronclient.v2_0.client [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 659.645388] env[65758]: WARNING openstack [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.645715] env[65758]: WARNING openstack [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.664109] env[65758]: INFO nova.compute.manager [None req-b8f19fb2-1914-498a-b74f-c5d4a7b2e3b0 tempest-ServerDiagnosticsTest-330201739 tempest-ServerDiagnosticsTest-330201739-project-admin] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Retrieving diagnostics [ 659.665635] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2faec008-e08a-47fb-9888-80dace60436a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.673587] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ecb070-8b3f-ffb3-d140-fe6e8f975e39, 'name': SearchDatastore_Task, 'duration_secs': 0.012237} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.674333] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.674618] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a2010738-759b-480a-8360-2639788056b1/a2010738-759b-480a-8360-2639788056b1.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 659.674980] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1c802e6-51a7-4714-8b96-8e9d12733724 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.720916] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 659.720916] env[65758]: value = "task-4660015" [ 659.720916] env[65758]: _type = "Task" [ 659.720916] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.731498] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660015, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.737299] env[65758]: DEBUG nova.compute.utils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 659.742327] env[65758]: DEBUG nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 659.744038] env[65758]: DEBUG nova.network.neutron [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 659.744038] env[65758]: WARNING neutronclient.v2_0.client [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 659.744238] env[65758]: WARNING neutronclient.v2_0.client [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 659.744857] env[65758]: WARNING openstack [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 659.745368] env[65758]: WARNING openstack [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 659.781757] env[65758]: DEBUG nova.network.neutron [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Updating instance_info_cache with network_info: [{"id": "872949b5-9bac-4f83-acec-93e23be464c5", "address": "fa:16:3e:f8:7d:e0", "network": {"id": "115e8c49-6d73-405e-a185-9072fb560eb2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1829868231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ca73ea9954543e38b16a12b37d531c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap872949b5-9b", "ovs_interfaceid": "872949b5-9bac-4f83-acec-93e23be464c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 659.838217] env[65758]: DEBUG nova.policy [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5a46f92e344735a697b4f9f07fb536', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aab1df827abb49b88b951d30ba485d39', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.851028] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Releasing lock "refresh_cache-83b637d8-b9fa-4159-b879-c1d737871539" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.851544] env[65758]: DEBUG nova.compute.manager [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-vif-plugged-d0a812a9-0121-493b-92c4-16221a927a6a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 659.851700] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Acquiring lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.851777] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.851915] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.852090] env[65758]: DEBUG nova.compute.manager [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] No waiting events found dispatching network-vif-plugged-d0a812a9-0121-493b-92c4-16221a927a6a {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 659.852247] env[65758]: WARNING nova.compute.manager [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received unexpected event network-vif-plugged-d0a812a9-0121-493b-92c4-16221a927a6a for instance with vm_state building and task_state spawning. 
[ 659.852553] env[65758]: DEBUG nova.compute.manager [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-changed-d0a812a9-0121-493b-92c4-16221a927a6a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 659.852667] env[65758]: DEBUG nova.compute.manager [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Refreshing instance network info cache due to event network-changed-d0a812a9-0121-493b-92c4-16221a927a6a. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 659.852967] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Acquiring lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.853043] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Acquired lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.853197] env[65758]: DEBUG nova.network.neutron [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Refreshing network info cache for port d0a812a9-0121-493b-92c4-16221a927a6a {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 660.005523] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660013, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.040862] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.133204] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660014, 'name': PowerOffVM_Task, 'duration_secs': 0.161161} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.133526] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 660.133759] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 660.134760] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a276e3-8861-4a9d-ac34-3195e4fbc1da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.149663] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquiring lock "2bd02c6d-a139-4259-8b28-eed5efc5d094" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.150178] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "2bd02c6d-a139-4259-8b28-eed5efc5d094" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.154602] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 660.155381] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb1ddd5a-aaf4-4c44-989a-01c589f3f0bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.187131] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 660.187131] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 660.187382] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 
tempest-ServersAdmin275Test-1976369394-project-admin] Deleting the datastore file [datastore1] 24379189-b10a-4ef6-a3f6-b7bb43029dab {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 660.187588] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d807541c-ded8-4ab5-b49d-f83d1374538f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.196813] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 660.196813] env[65758]: value = "task-4660017" [ 660.196813] env[65758]: _type = "Task" [ 660.196813] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.210428] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.234247] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660015, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.256075] env[65758]: DEBUG nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 660.286659] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Releasing lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.286659] env[65758]: DEBUG nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Instance network_info: |[{"id": "872949b5-9bac-4f83-acec-93e23be464c5", "address": "fa:16:3e:f8:7d:e0", "network": {"id": "115e8c49-6d73-405e-a185-9072fb560eb2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1829868231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ca73ea9954543e38b16a12b37d531c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap872949b5-9b", "ovs_interfaceid": "872949b5-9bac-4f83-acec-93e23be464c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 660.286659] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:7d:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '872949b5-9bac-4f83-acec-93e23be464c5', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.298033] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Creating folder: Project (9ca73ea9954543e38b16a12b37d531c6). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.301721] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-742c65a1-7952-49fc-8fc4-76ac4a588c46 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.316042] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Created folder: Project (9ca73ea9954543e38b16a12b37d531c6) in parent group-v909763. [ 660.316292] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Creating folder: Instances. Parent ref: group-v909828. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.316626] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4eb3aa50-83c7-4a3b-81d9-4dc04fb2255c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.331058] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Created folder: Instances in parent group-v909828. [ 660.331058] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 660.331058] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 660.331478] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bcb5cf2-c455-4d77-92c8-7fede4305778 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.348933] env[65758]: DEBUG nova.network.neutron [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Successfully created port: 8c662ce6-206b-49ce-836c-0bbc9792f182 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 660.356885] env[65758]: WARNING neutronclient.v2_0.client [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 660.357903] env[65758]: WARNING openstack [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 660.358320] env[65758]: WARNING openstack [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 660.372225] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.372225] env[65758]: value = "task-4660020" [ 660.372225] env[65758]: _type = "Task" [ 660.372225] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.379627] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5eddd5-71ed-4591-addd-4de4bb07403c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.388169] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660020, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.391125] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8ac6be-242c-4b03-8574-c182876283e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.193552] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660013, 'name': CreateVM_Task, 'duration_secs': 0.655259} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.194107] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235916} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.194322] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660015, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654154} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.195940] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 661.196687] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91936687-1892-4bbc-bf42-7c287aa029de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.199281] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 661.199520] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 661.199743] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 661.202269] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a2010738-759b-480a-8360-2639788056b1/a2010738-759b-480a-8360-2639788056b1.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 661.202473] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.203234] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7591330b-821a-431f-b7bf-2299805c81b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.207433] env[65758]: WARNING neutronclient.v2_0.client [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 661.207836] env[65758]: WARNING neutronclient.v2_0.client [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 661.208135] env[65758]: WARNING neutronclient.v2_0.client [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 661.208434] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.208579] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.208970] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 661.209595] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ad2af47-0490-44f1-9a19-691435c568ff {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.215032] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4928b5f5-9e6f-4324-994f-d75fb6149954 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.217864] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660020, 'name': CreateVM_Task, 'duration_secs': 0.641633} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.218512] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 661.219486] env[65758]: WARNING neutronclient.v2_0.client [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 661.219907] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.240122] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb04c83-7eb8-4d57-b8e0-2baa49f95682 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.246499] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 661.246499] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521431d4-5d2c-09e6-b915-15a0ae1278bc" [ 661.246499] env[65758]: _type = "Task" [ 661.246499] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.247439] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 661.247439] env[65758]: value = "task-4660021" [ 661.247439] env[65758]: _type = "Task" [ 661.247439] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.248093] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a9e22e-5155-4530-805c-d2691af594d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.270582] env[65758]: DEBUG nova.compute.provider_tree [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.276454] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance '83fa942b-a195-4bcb-9ed5-5bb6764220a4' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 661.290058] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660021, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.290503] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521431d4-5d2c-09e6-b915-15a0ae1278bc, 'name': SearchDatastore_Task, 'duration_secs': 0.012154} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.291515] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.291880] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 661.292267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.292517] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.292809] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 661.293253] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.293766] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 661.294135] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f24ba56-b9a0-4ce9-9a9f-5987b27c6b31 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.298517] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31c2b7e9-0634-4d12-89bd-eb4fc3db1aa4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.307410] env[65758]: DEBUG oslo_vmware.api [None 
req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 661.307410] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52105751-ff81-e416-997e-b1ab5b6bfdcb" [ 661.307410] env[65758]: _type = "Task" [ 661.307410] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.311866] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 661.312116] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 661.313335] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ea33c59-4a4b-418b-848d-03c3a65f3df3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.321128] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52105751-ff81-e416-997e-b1ab5b6bfdcb, 'name': SearchDatastore_Task, 'duration_secs': 0.012189} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.322661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.322764] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 661.323455] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.323455] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 661.323455] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d617f5-57de-1ddd-38c6-75758c2e4eae" [ 661.323455] env[65758]: _type = "Task" [ 661.323455] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.332822] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d617f5-57de-1ddd-38c6-75758c2e4eae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.655415] env[65758]: DEBUG nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 661.687664] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 661.687664] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 661.687664] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 661.687906] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 661.688032] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 661.688301] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 661.688434] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 661.688581] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 661.688715] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 661.688977] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 661.689237] env[65758]: DEBUG nova.virt.hardware [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 661.690496] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b0afdf-0b54-4464-80d7-bbab4aedf88f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.700232] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07696e0-0a11-4125-9013-badb49c50147 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.762126] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660021, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080009} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.762421] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.765540] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc4f8f5-600e-467e-9ae1-7596a7b24526 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.780518] env[65758]: DEBUG nova.scheduler.client.report [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 661.793314] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] a2010738-759b-480a-8360-2639788056b1/a2010738-759b-480a-8360-2639788056b1.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.796430] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.796602] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ba132ba-eb14-48b7-a0ee-6f3fd5ce5c8c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.012635] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f68ae3b8-725a-4eb0-a433-a280c2cccccd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.012635] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 662.012635] env[65758]: value = "task-4660022" [ 662.012635] env[65758]: _type = "Task" [ 662.012635] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.012635] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 662.012635] env[65758]: value = "task-4660023" [ 662.012635] env[65758]: _type = "Task" [ 662.012635] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.012635] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d617f5-57de-1ddd-38c6-75758c2e4eae, 'name': SearchDatastore_Task, 'duration_secs': 0.011538} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.012635] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb3c8b7b-01d7-4469-8dfa-69ee51bed8b3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.012635] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.012635] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660022, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.012635] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 662.012635] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c439aa-1bc4-421a-52eb-b270b264ba7c" [ 662.012635] env[65758]: _type = "Task" [ 662.012635] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.012635] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c439aa-1bc4-421a-52eb-b270b264ba7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.170334] env[65758]: DEBUG nova.network.neutron [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Successfully updated port: 8c662ce6-206b-49ce-836c-0bbc9792f182 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 662.224323] env[65758]: WARNING neutronclient.v2_0.client [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 662.224973] env[65758]: WARNING openstack [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 662.225337] env[65758]: WARNING openstack [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 662.254380] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 662.254698] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.254911] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 662.255840] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 662.255840] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 662.255840] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 662.256029] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.256184] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 662.256499] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 662.256694] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 662.256899] env[65758]: DEBUG nova.virt.hardware [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 662.259263] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5488a2d1-2934-48ac-a347-6d3b041a545e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.269434] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92106a83-8094-42a4-9f82-1d319dc29603 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.287021] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.293184] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting 
for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 662.293184] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 662.293425] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2804d344-4a80-4814-9e40-598c95a9174a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.309803] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.075s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.309803] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.600s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.313979] env[65758]: INFO nova.compute.claims [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.323592] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.323592] env[65758]: value = "task-4660024" [ 662.323592] env[65758]: _type = "Task" [ 662.323592] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.332831] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660024, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.350327] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660022, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.350532] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660023, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.357686] env[65758]: INFO nova.scheduler.client.report [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Deleted allocations for instance 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b [ 662.374580] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c439aa-1bc4-421a-52eb-b270b264ba7c, 'name': SearchDatastore_Task, 'duration_secs': 0.017259} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.374580] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.375154] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 483765b5-c63c-4aac-9082-519bbc4e6eb5/483765b5-c63c-4aac-9082-519bbc4e6eb5.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 662.375154] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.375343] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.375538] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd94899d-9b28-4c98-afa2-7c9993ffd50a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.378263] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e75088a1-c906-43c8-886e-415a5dbb8636 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.391987] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 662.391987] env[65758]: value = "task-4660025" [ 662.391987] env[65758]: _type = "Task" [ 662.391987] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.395416] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.395682] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 662.403126] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b0bb96e-29ed-45c6-9be7-741f9cc832b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.412105] env[65758]: DEBUG nova.network.neutron [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Updated VIF entry in instance network info cache for port d0a812a9-0121-493b-92c4-16221a927a6a. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 662.413061] env[65758]: DEBUG nova.network.neutron [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Updating instance_info_cache with network_info: [{"id": "d0a812a9-0121-493b-92c4-16221a927a6a", "address": "fa:16:3e:92:b8:aa", "network": {"id": "5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a812a9-01", "ovs_interfaceid": "d0a812a9-0121-493b-92c4-16221a927a6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "026141b1-3811-4baa-8195-d418fa316270", "address": "fa:16:3e:f7:60:d4", "network": {"id": "bfd8cc95-fa08-4bc9-976c-adfbd4c45ea9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2004041920", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.115", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap026141b1-38", "ovs_interfaceid": "026141b1-3811-4baa-8195-d418fa316270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "acba5b06-e536-4848-be4e-db877af4d6ac", "address": "fa:16:3e:11:b1:da", "network": {"id": "5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacba5b06-e5", "ovs_interfaceid": "acba5b06-e536-4848-be4e-db877af4d6ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 662.419810] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 662.419810] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524e79cf-186b-8a86-82cd-a57e4d65031a" [ 662.419810] env[65758]: _type = "Task" [ 662.419810] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.423686] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660025, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.436490] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524e79cf-186b-8a86-82cd-a57e4d65031a, 'name': SearchDatastore_Task, 'duration_secs': 0.012543} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.437760] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f7b304b-2c46-459f-9ccf-652a94a8e512 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.446081] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 662.446081] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5290d9db-52c9-b397-1771-bc8122ece3d3" [ 662.446081] env[65758]: _type = "Task" [ 662.446081] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.455877] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5290d9db-52c9-b397-1771-bc8122ece3d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.679428] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "refresh_cache-24016efd-cdb3-4c1e-9c08-8643400e729e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.679428] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquired lock "refresh_cache-24016efd-cdb3-4c1e-9c08-8643400e729e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.679428] env[65758]: DEBUG nova.network.neutron [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 662.839838] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660024, 'name': CreateVM_Task, 'duration_secs': 0.410274} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.851565] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 662.851565] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.851835] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.851955] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 662.853680] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2087fb04-5508-405b-b9ee-ea23ca147ea1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.863856] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660023, 'name': ReconfigVM_Task, 'duration_secs': 0.527063} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.864152] env[65758]: DEBUG oslo_vmware.api [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660022, 'name': PowerOnVM_Task, 'duration_secs': 0.87986} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.868454] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Reconfigured VM instance instance-00000016 to attach disk [datastore2] a2010738-759b-480a-8360-2639788056b1/a2010738-759b-480a-8360-2639788056b1.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 662.870415] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 662.870918] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e558a3-3de7-45b0-9811-bc9e693f2893 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance '83fa942b-a195-4bcb-9ed5-5bb6764220a4' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 662.876480] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 662.876480] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527cd2ab-76bb-7705-357f-88f4c93842fe" [ 662.876480] env[65758]: _type = "Task" [ 662.876480] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.879830] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bbce398-2049-4dc0-9418-a08ae0d17ce9 tempest-VolumesAdminNegativeTest-1761109407 tempest-VolumesAdminNegativeTest-1761109407-project-member] Lock "64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.339s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.881525] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f5dc95d-3b57-4d55-be47-886dd070d5f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.896044] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527cd2ab-76bb-7705-357f-88f4c93842fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.902268] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 662.902268] env[65758]: value = "task-4660026" [ 662.902268] env[65758]: _type = "Task" [ 662.902268] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.913210] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660025, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.921859] env[65758]: DEBUG oslo_concurrency.lockutils [req-5aeb8cf9-be3a-4d5d-b166-1d0cb0fb28f4 req-04f6dcd5-33f7-4fdb-8aba-7c1176fdc66b service nova] Releasing lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.921859] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660026, 'name': Rename_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.959076] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5290d9db-52c9-b397-1771-bc8122ece3d3, 'name': SearchDatastore_Task, 'duration_secs': 0.012718} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.959245] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.959528] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 0addcbb1-3561-4c93-b714-37e6b613b962/0addcbb1-3561-4c93-b714-37e6b613b962.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 662.959843] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00a4ba6b-37fc-42c6-94d6-4fab2d4ce7ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.969241] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 662.969241] env[65758]: value = "task-4660027" [ 662.969241] env[65758]: _type = "Task" [ 662.969241] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.980378] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.149418] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: a2010738-759b-480a-8360-2639788056b1] Received event network-vif-plugged-acfd4b22-8660-461e-9e71-fa7134a7936e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 663.149528] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Acquiring lock "a2010738-759b-480a-8360-2639788056b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.151670] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Lock "a2010738-759b-480a-8360-2639788056b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.151879] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Lock "a2010738-759b-480a-8360-2639788056b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.152290] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: a2010738-759b-480a-8360-2639788056b1] No waiting events found dispatching network-vif-plugged-acfd4b22-8660-461e-9e71-fa7134a7936e {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 663.152633] env[65758]: WARNING nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: a2010738-759b-480a-8360-2639788056b1] Received unexpected event network-vif-plugged-acfd4b22-8660-461e-9e71-fa7134a7936e for instance with vm_state building and task_state spawning. [ 663.152633] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: a2010738-759b-480a-8360-2639788056b1] Received event network-changed-acfd4b22-8660-461e-9e71-fa7134a7936e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 663.152903] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: a2010738-759b-480a-8360-2639788056b1] Refreshing instance network info cache due to event network-changed-acfd4b22-8660-461e-9e71-fa7134a7936e. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 663.153026] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Acquiring lock "refresh_cache-a2010738-759b-480a-8360-2639788056b1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.153126] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Acquired lock "refresh_cache-a2010738-759b-480a-8360-2639788056b1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.153248] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: a2010738-759b-480a-8360-2639788056b1] Refreshing network info cache for port acfd4b22-8660-461e-9e71-fa7134a7936e {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 663.176623] env[65758]: DEBUG nova.compute.manager [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-changed-026141b1-3811-4baa-8195-d418fa316270 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 663.176623] env[65758]: DEBUG nova.compute.manager [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Refreshing instance network info cache due to event network-changed-026141b1-3811-4baa-8195-d418fa316270. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 663.177217] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Acquiring lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.177217] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Acquired lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.177955] env[65758]: DEBUG nova.network.neutron [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Refreshing network info cache for port 026141b1-3811-4baa-8195-d418fa316270 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 663.183322] env[65758]: WARNING openstack [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 663.183713] env[65758]: WARNING openstack [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 
'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 663.242134] env[65758]: DEBUG nova.network.neutron [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 663.401614] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527cd2ab-76bb-7705-357f-88f4c93842fe, 'name': SearchDatastore_Task, 'duration_secs': 0.059332} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.405986] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.406375] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.406692] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.406916] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.407171] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.411440] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dbc150a-474d-42ed-89a0-5c57ff7c4546 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.428692] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 
tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565421} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.439915] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 483765b5-c63c-4aac-9082-519bbc4e6eb5/483765b5-c63c-4aac-9082-519bbc4e6eb5.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 663.440763] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 663.441667] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660026, 'name': Rename_Task, 'duration_secs': 0.189458} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.444543] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13beb016-59f2-4b9f-a956-e12d704bfc48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.450832] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 663.450832] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.450832] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.450832] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a99607ae-7b59-4265-a560-e4d3dc17ff14 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.452858] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-064b48a8-71fa-4af1-800a-5df18e97eac8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.462933] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 663.462933] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525abfaf-6a18-bc20-e172-7f7d9f23ae81" [ 663.462933] env[65758]: _type = "Task" [ 663.462933] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.465960] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 663.465960] env[65758]: value = "task-4660029" [ 663.465960] env[65758]: _type = "Task" [ 663.465960] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.466332] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 663.466332] env[65758]: value = "task-4660028" [ 663.466332] env[65758]: _type = "Task" [ 663.466332] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.498447] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525abfaf-6a18-bc20-e172-7f7d9f23ae81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.509361] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660028, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.509780] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660027, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.511865] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660029, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.613263] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquiring lock "28ccc013-962d-4607-83a2-5fcd480c27b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.613672] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "28ccc013-962d-4607-83a2-5fcd480c27b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.613902] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquiring lock "28ccc013-962d-4607-83a2-5fcd480c27b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.614131] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "28ccc013-962d-4607-83a2-5fcd480c27b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.614305] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "28ccc013-962d-4607-83a2-5fcd480c27b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.618239] env[65758]: INFO nova.compute.manager [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Terminating instance [ 663.657815] env[65758]: WARNING neutronclient.v2_0.client [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
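Editorial aside: the instance_info_cache entries logged above (for example the one recorded for instance 483765b5-c63c-4aac-9082-519bbc4e6eb5) are raw Neutron VIF dictionaries and are hard to scan by eye. Below is a minimal, self-contained Python sketch for pulling the fields that matter when reading these lines; it is not Nova code, and the summarize_vifs helper plus the trimmed sample entry are hypothetical, modelled only on the structure shown in the logged JSON.

import json

def summarize_vifs(network_info):
    """Reduce a network_info cache entry (a list of VIF dicts, as logged
    above) to port id, MAC, tap device name, network label, fixed IPs and
    segmentation id."""
    summary = []
    for vif in network_info:
        ips = [
            ip["address"]
            for subnet in vif.get("network", {}).get("subnets", [])
            for ip in subnet.get("ips", [])
        ]
        details = vif.get("details", {})
        summary.append({
            "port_id": vif.get("id"),
            "mac": vif.get("address"),
            "devname": vif.get("devname"),
            "network": vif.get("network", {}).get("label"),
            "fixed_ips": ips,
            "segmentation_id": details.get("segmentation_id"),
            "active": vif.get("active"),
        })
    return summary

if __name__ == "__main__":
    # Trimmed-down sample modelled on the cache entry logged above; values
    # are copied from the log, the overall entry is abbreviated.
    cache_entry = [{
        "id": "d0a812a9-0121-493b-92c4-16221a927a6a",
        "address": "fa:16:3e:92:b8:aa",
        "devname": "tapd0a812a9-01",
        "active": True,
        "details": {"segmentation_id": 676},
        "network": {
            "label": "tempest-ServersTestMultiNic-760279645",
            "subnets": [{"cidr": "192.168.128.0/24",
                         "ips": [{"address": "192.168.128.45"}]}],
        },
    }]
    print(json.dumps(summarize_vifs(cache_entry), indent=2))
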
[ 663.659310] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 663.660066] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 663.680921] env[65758]: WARNING neutronclient.v2_0.client [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 663.681985] env[65758]: WARNING openstack [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 663.682150] env[65758]: WARNING openstack [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 663.791216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "0ac196fa-d88c-45a8-999e-8b5216912041" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.791595] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "0ac196fa-d88c-45a8-999e-8b5216912041" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.791773] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "0ac196fa-d88c-45a8-999e-8b5216912041-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.791975] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 
tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "0ac196fa-d88c-45a8-999e-8b5216912041-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.792143] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "0ac196fa-d88c-45a8-999e-8b5216912041-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.796668] env[65758]: INFO nova.compute.manager [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Terminating instance [ 663.895112] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67d927b-8eda-43cc-b985-0ffd34d6afc5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.907251] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd7af4a-10fd-4af9-b70d-5b17df9d55f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.939309] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1517b15-bb73-42c2-bbfc-56719a8a3a7e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.948888] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec00d008-68e7-4ea7-a52a-8be126f15d91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.964704] env[65758]: DEBUG nova.compute.provider_tree [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.983678] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525abfaf-6a18-bc20-e172-7f7d9f23ae81, 'name': SearchDatastore_Task, 'duration_secs': 0.058394} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.985530] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c8815d6-9861-4065-8e2b-62623aee4a01 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.997505] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135304} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.997726] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.780429} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.997921] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660028, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.998682] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 663.999170] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 0addcbb1-3561-4c93-b714-37e6b613b962/0addcbb1-3561-4c93-b714-37e6b613b962.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 663.999350] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.000040] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baaa5b65-fcde-4dc9-9fcb-03405b207b4e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.003061] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b6f5b88-3a0c-4ff8-997c-51e080e95d4f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.008226] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 
tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 664.008226] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52222ab8-c8f6-2ac4-672f-3105a71e1e81" [ 664.008226] env[65758]: _type = "Task" [ 664.008226] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.036759] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 483765b5-c63c-4aac-9082-519bbc4e6eb5/483765b5-c63c-4aac-9082-519bbc4e6eb5.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.039045] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14bf3ec4-9d6c-4e95-bbe8-0d6008e5f7d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.053972] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 664.053972] env[65758]: value = "task-4660030" [ 664.053972] env[65758]: _type = "Task" [ 664.053972] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.058847] env[65758]: WARNING neutronclient.v2_0.client [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 664.059620] env[65758]: WARNING openstack [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 664.059979] env[65758]: WARNING openstack [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 664.068344] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52222ab8-c8f6-2ac4-672f-3105a71e1e81, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.075975] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 664.075975] env[65758]: value = "task-4660031" [ 664.075975] env[65758]: _type = "Task" [ 664.075975] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.079175] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660030, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.088766] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660031, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.124691] env[65758]: DEBUG nova.compute.manager [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 664.125030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 664.126030] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a27a6d-1eb8-4bc7-88c0-0bb4ae965d67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.136364] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 664.136644] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d5407fe-4232-4c4a-860c-eaffa7576839 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.145777] env[65758]: DEBUG oslo_vmware.api [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 664.145777] env[65758]: value = "task-4660032" [ 664.145777] env[65758]: _type = "Task" [ 664.145777] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.160342] env[65758]: DEBUG oslo_vmware.api [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4660032, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.304777] env[65758]: DEBUG nova.compute.manager [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 664.305157] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 664.307032] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3162a1a-f9c7-4166-9dac-d6c36ece72c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.318657] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 664.319032] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41d96c98-f428-401b-849d-9e6c8c899f08 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.332035] env[65758]: DEBUG oslo_vmware.api [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 664.332035] env[65758]: value = "task-4660033" [ 664.332035] env[65758]: _type = "Task" [ 664.332035] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.344959] env[65758]: DEBUG oslo_vmware.api [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660033, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.469038] env[65758]: DEBUG nova.scheduler.client.report [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 664.492909] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660028, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.522228] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52222ab8-c8f6-2ac4-672f-3105a71e1e81, 'name': SearchDatastore_Task, 'duration_secs': 0.042473} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.522922] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.523219] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.523764] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14269cc3-4181-487b-b74d-da436662ab55 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.533696] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 664.533696] env[65758]: value = "task-4660034" [ 664.533696] env[65758]: _type = "Task" [ 664.533696] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.546996] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.581497] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.260321} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.585416] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.586863] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57292a0-a8ba-4c47-9dac-8ad2fc32c282 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.613141] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660031, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.621959] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 0addcbb1-3561-4c93-b714-37e6b613b962/0addcbb1-3561-4c93-b714-37e6b613b962.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.622357] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-648de16f-675e-4152-86b5-2d88e91a1160 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.646195] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 664.646195] env[65758]: value = "task-4660035" [ 664.646195] env[65758]: _type = "Task" [ 664.646195] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.662968] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660035, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.667140] env[65758]: DEBUG oslo_vmware.api [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4660032, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.845433] env[65758]: DEBUG oslo_vmware.api [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660033, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.977773] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.978406] env[65758]: DEBUG nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 664.987267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.504s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.989674] env[65758]: INFO nova.compute.claims [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.002862] env[65758]: DEBUG oslo_vmware.api [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660028, 'name': PowerOnVM_Task, 'duration_secs': 1.262669} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.003353] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 665.003749] env[65758]: INFO nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Took 12.31 seconds to spawn the instance on the hypervisor. 
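The "Lock "compute_resources" acquired ... waited 22.504s" / ""released" ... held 2.668s" records above come from named locks taken around the resource tracker's claim logic. A minimal sketch of that pattern, assuming oslo.concurrency is available as in this environment; instance_claim, claim_resources and locked_build below are illustrative stand-ins, not nova's actual code:

# Illustrative only: the named-lock pattern that produces the
# "acquired ... waited Ns" / "released ... held Ns" lines in this log.
# Nova wraps oslo.concurrency with its own helpers; these functions are
# hypothetical stand-ins.
from oslo_concurrency import lockutils


def claim_resources(instance):
    # Placeholder for the real resource-tracker bookkeeping.
    return {'instance': instance, 'claimed': True}


@lockutils.synchronized('compute_resources')
def instance_claim(instance):
    # Only one worker thread runs this body at a time; time spent blocked
    # shows up in the log as "waited", time spent inside as "held".
    return claim_resources(instance)


def locked_build(instance_uuid):
    # Equivalent context-manager form, comparable to the per-instance
    # build lock reported as held for 38.028s further down in this log.
    with lockutils.lock(instance_uuid):
        return instance_claim(instance_uuid)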
[ 665.004078] env[65758]: DEBUG nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 665.005239] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23dcf4c-ba33-4a42-9598-a5e9f4bd1179 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.047335] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660034, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.094941] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660031, 'name': ReconfigVM_Task, 'duration_secs': 0.705162} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.095266] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 483765b5-c63c-4aac-9082-519bbc4e6eb5/483765b5-c63c-4aac-9082-519bbc4e6eb5.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.096599] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f69e2378-5445-45c2-ba6b-ccf6948900cd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.105840] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 665.105840] env[65758]: value = "task-4660036" [ 665.105840] env[65758]: _type = "Task" [ 665.105840] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.122453] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660036, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.162975] env[65758]: DEBUG oslo_vmware.api [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4660032, 'name': PowerOffVM_Task, 'duration_secs': 0.780719} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.172406] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 665.172609] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 665.172876] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660035, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.173480] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e11b8a7-50cc-4a61-ada6-7ca95cfbf2e8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.286363] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 665.286363] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 665.286363] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Deleting the datastore file [datastore1] 28ccc013-962d-4607-83a2-5fcd480c27b2 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 665.286363] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb364349-0ba7-4e11-9fa3-d628ef3ed7ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.294046] env[65758]: DEBUG oslo_vmware.api [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for the task: (returnval){ [ 665.294046] env[65758]: value = "task-4660038" [ 665.294046] env[65758]: _type = "Task" [ 665.294046] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.307818] env[65758]: DEBUG oslo_vmware.api [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4660038, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.343764] env[65758]: DEBUG oslo_vmware.api [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660033, 'name': PowerOffVM_Task, 'duration_secs': 0.605018} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.344403] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 665.344819] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 665.345327] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-546f40d4-64be-49f7-b63f-9a43ddd5d8b3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.438394] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 665.438830] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 665.439202] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleting the datastore file [datastore2] 0ac196fa-d88c-45a8-999e-8b5216912041 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 665.442019] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24653298-e742-49a4-a0f4-022cd49e971c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.451585] env[65758]: DEBUG oslo_vmware.api [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 665.451585] env[65758]: value = "task-4660040" [ 
665.451585] env[65758]: _type = "Task" [ 665.451585] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.462913] env[65758]: DEBUG oslo_vmware.api [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660040, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.490636] env[65758]: DEBUG nova.compute.utils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 665.490636] env[65758]: DEBUG nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 665.490636] env[65758]: DEBUG nova.network.neutron [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 665.491437] env[65758]: WARNING neutronclient.v2_0.client [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 665.491945] env[65758]: WARNING neutronclient.v2_0.client [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 665.492910] env[65758]: WARNING openstack [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 665.493735] env[65758]: WARNING openstack [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 665.505989] env[65758]: DEBUG nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 665.534641] env[65758]: INFO nova.compute.manager [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Took 34.22 seconds to build instance. [ 665.557304] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660034, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602086} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.557647] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.557900] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.558548] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-beffff42-d3d5-44aa-ae5b-8d1fb9d1724f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.569981] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 665.569981] env[65758]: value = "task-4660041" [ 665.569981] env[65758]: _type = "Task" [ 665.569981] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.584478] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660041, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.619426] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660036, 'name': Rename_Task, 'duration_secs': 0.321201} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.619778] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 665.620190] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fc51843-fb58-4f97-9276-5debf6d19fd7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.628834] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 665.628834] env[65758]: value = "task-4660042" [ 665.628834] env[65758]: _type = "Task" [ 665.628834] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.639851] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660042, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.659208] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660035, 'name': ReconfigVM_Task, 'duration_secs': 0.57319} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.659525] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 0addcbb1-3561-4c93-b714-37e6b613b962/0addcbb1-3561-4c93-b714-37e6b613b962.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.660236] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf672638-c04d-4145-a659-4a2298b809a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.669248] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 665.669248] env[65758]: value = "task-4660043" [ 665.669248] env[65758]: _type = "Task" [ 665.669248] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.679256] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660043, 'name': Rename_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.804407] env[65758]: DEBUG oslo_vmware.api [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Task: {'id': task-4660038, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.367565} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.804653] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 665.804829] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 665.805037] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 665.805225] env[65758]: INFO nova.compute.manager [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Took 1.68 seconds to destroy the instance on the hypervisor. [ 665.805468] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 665.805659] env[65758]: DEBUG nova.compute.manager [-] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 665.805756] env[65758]: DEBUG nova.network.neutron [-] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 665.805999] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 665.806671] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 665.806796] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 665.966872] env[65758]: DEBUG oslo_vmware.api [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288018} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.967136] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 665.967428] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 665.967678] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 665.967922] env[65758]: INFO nova.compute.manager [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Took 1.66 seconds to destroy the instance on the hypervisor. [ 665.968270] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 665.968773] env[65758]: DEBUG nova.compute.manager [-] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 665.968773] env[65758]: DEBUG nova.network.neutron [-] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 665.969085] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
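Both destroys hand the Neutron cleanup to a retrying wrapper, which is what the "Waiting for function ..._deallocate_network_with_retries to return." records reflect; the real code drives this through oslo.service's loopingcall machinery. Below is a hand-rolled equivalent of that retry shape only, with illustrative names, exception types, and retry counts rather than nova's actual values:

# Minimal, self-contained imitation of the retry-until-done wrapper seen in
# the records above. Not nova's implementation; it only mimics the shape.
import functools
import time


def retry_on(exceptions, max_attempts=3, sleep=1.0):
    """Retry the wrapped function on the given exceptions, then re-raise."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            print(f'Waiting for function {func.__qualname__} to return.')
            for attempt in range(1, max_attempts + 1):
                try:
                    return func(*args, **kwargs)
                except exceptions:
                    if attempt == max_attempts:
                        raise
                    time.sleep(sleep)
        return wrapper
    return decorator


@retry_on((ConnectionError,), max_attempts=3, sleep=0.1)
def deallocate_network_with_retries():
    # Stand-in for the Neutron port cleanup the compute manager retries.
    return 'ports released'


print(deallocate_network_with_retries())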
[ 665.969891] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 665.970262] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 666.041023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-633332b0-ae04-40bd-a631-af8bc36c9f43 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "a2010738-759b-480a-8360-2639788056b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.028s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.085230] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095874} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.089474] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 666.091176] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effb9695-957c-4634-ae98-ed0cc06ce95b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.115213] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 666.118600] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfa7e402-75d4-487b-b5df-3e6688055485 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.147277] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660042, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.152326] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 666.152326] env[65758]: value = "task-4660044" [ 666.152326] env[65758]: _type = "Task" [ 666.152326] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.164769] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660044, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.180056] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660043, 'name': Rename_Task, 'duration_secs': 0.356923} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.180056] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.180348] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae7ea3a7-509d-4de0-9da8-2a860029d5eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.188735] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 666.188735] env[65758]: value = "task-4660045" [ 666.188735] env[65758]: _type = "Task" [ 666.188735] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.201384] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660045, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.244521] env[65758]: DEBUG nova.network.neutron [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Updating instance_info_cache with network_info: [{"id": "8c662ce6-206b-49ce-836c-0bbc9792f182", "address": "fa:16:3e:60:f6:9a", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c662ce6-20", "ovs_interfaceid": "8c662ce6-206b-49ce-836c-0bbc9792f182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 666.377020] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.377513] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.500727] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 666.526279] env[65758]: DEBUG nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 666.528687] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7fa9b0-f2aa-4d59-a62a-3d4604f42d74 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.540959] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf7bfc5-d1f3-4083-803b-0770dd933b11 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.548407] env[65758]: DEBUG nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 666.595774] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dcd111-8dcb-4586-a280-0f5a5350ed4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.605676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a763aa-04dd-481c-8422-0cc3c53926d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.616306] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 666.616615] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 666.616785] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 666.617052] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 666.617248] 
env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 666.617416] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 666.617639] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 666.617976] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 666.617976] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 666.618166] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 666.618363] env[65758]: DEBUG nova.virt.hardware [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 666.620177] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac91a28-fe38-401f-b150-bbedbab58124 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.641230] env[65758]: DEBUG nova.compute.provider_tree [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.650780] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8588c9-c001-453e-a415-bcf0af52ad3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.657559] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a 
future release. [ 666.669224] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660042, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.684248] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.704067] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660045, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.746821] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Releasing lock "refresh_cache-24016efd-cdb3-4c1e-9c08-8643400e729e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.747338] env[65758]: DEBUG nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Instance network_info: |[{"id": "8c662ce6-206b-49ce-836c-0bbc9792f182", "address": "fa:16:3e:60:f6:9a", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c662ce6-20", "ovs_interfaceid": "8c662ce6-206b-49ce-836c-0bbc9792f182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 666.747944] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:f6:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c662ce6-206b-49ce-836c-0bbc9792f182', 
'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 666.758062] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 666.758495] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 666.758859] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-874cf839-db1a-4fe0-9a20-2864836df37e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.787187] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 666.787187] env[65758]: value = "task-4660046" [ 666.787187] env[65758]: _type = "Task" [ 666.787187] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.796763] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660046, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.890320] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.890712] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.890986] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.892820] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.892820] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.892820] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.892820] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 666.892820] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 667.078973] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.125589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquiring lock "83b637d8-b9fa-4159-b879-c1d737871539" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.125589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "83b637d8-b9fa-4159-b879-c1d737871539" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.125589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquiring lock "83b637d8-b9fa-4159-b879-c1d737871539-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.125589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "83b637d8-b9fa-4159-b879-c1d737871539-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.125589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "83b637d8-b9fa-4159-b879-c1d737871539-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.127803] env[65758]: INFO nova.compute.manager [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Terminating instance [ 667.151404] env[65758]: DEBUG nova.scheduler.client.report [None
req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.161727] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660042, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.176264] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660044, 'name': ReconfigVM_Task, 'duration_secs': 0.866107} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.176264] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab/24379189-b10a-4ef6-a3f6-b7bb43029dab.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 667.176264] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8a5d315-9809-4f68-b1d9-9ba14640622c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.195125] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 667.195125] env[65758]: value = "task-4660047" [ 667.195125] env[65758]: _type = "Task" [ 667.195125] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.213437] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660045, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.217591] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660047, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.300659] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660046, 'name': CreateVM_Task, 'duration_secs': 0.407945} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.300659] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 667.302128] env[65758]: WARNING neutronclient.v2_0.client [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 667.302528] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.304800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.304800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 667.304800] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cb2ec77-593d-4066-b3a2-89e3ba19e85f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.317566] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 667.317566] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522938bc-3421-a194-55f3-100426874036" [ 667.317566] env[65758]: _type = "Task" [ 667.317566] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.330816] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522938bc-3421-a194-55f3-100426874036, 'name': SearchDatastore_Task, 'duration_secs': 0.011466} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.331314] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.331428] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 667.331678] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.331797] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.331977] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 667.332474] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79bdf29a-fd76-4f9c-a890-0b82813de9d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.348440] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 667.348778] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 667.350059] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f9df8fc-d123-4feb-b864-5ac598e07047 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.360178] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 667.360178] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bcd7ee-d9cf-1a80-0195-e2f9f9e82c3b" [ 667.360178] env[65758]: _type = "Task" [ 667.360178] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.375884] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bcd7ee-d9cf-1a80-0195-e2f9f9e82c3b, 'name': SearchDatastore_Task, 'duration_secs': 0.012352} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.378066] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b68ef0c1-1360-42a3-a1a1-8511e09b9f19 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.386284] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 667.386284] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52cd596b-3a6e-0e10-8c0d-8bd79f0e19c3" [ 667.386284] env[65758]: _type = "Task" [ 667.386284] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.397600] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.398590] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52cd596b-3a6e-0e10-8c0d-8bd79f0e19c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.634329] env[65758]: DEBUG nova.compute.manager [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 667.634643] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.635694] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c174ef-757e-4d4f-b8c9-6f3c8c3d766f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.648302] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 667.650395] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9ac0ea0-5157-4555-ae6d-2a4ffe9a41a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.658691] env[65758]: DEBUG oslo_vmware.api [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660042, 'name': PowerOnVM_Task, 'duration_secs': 1.782801} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.661045] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.661045] env[65758]: INFO nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Took 17.71 seconds to spawn the instance on the hypervisor. [ 667.661193] env[65758]: DEBUG nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 667.661907] env[65758]: DEBUG oslo_vmware.api [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 667.661907] env[65758]: value = "task-4660048" [ 667.661907] env[65758]: _type = "Task" [ 667.661907] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.662743] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4e91ac-ca2a-429b-8ec2-5cfc72d7b170 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.667313] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.667660] env[65758]: DEBUG nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 667.677641] env[65758]: DEBUG nova.policy [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebdf3bebc38747cb8daaf7966ec6b089', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2030cc491d604d46bda3753f5a3485a5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 667.680580] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.255s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.680777] env[65758]: DEBUG nova.objects.instance [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 667.700050] env[65758]: DEBUG oslo_vmware.api [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660048, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.713760] env[65758]: DEBUG oslo_vmware.api [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660045, 'name': PowerOnVM_Task, 'duration_secs': 1.372249} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.717660] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.717660] env[65758]: INFO nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Took 12.41 seconds to spawn the instance on the hypervisor. [ 667.717854] env[65758]: DEBUG nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 667.719137] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660047, 'name': Rename_Task, 'duration_secs': 0.20198} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.719137] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d359247-7c1d-41ee-a7f1-76956ba4e151 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.722120] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 667.723161] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8f7df51-fff3-4e35-86f4-ee153ba49bfd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.734213] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Waiting for the task: (returnval){ [ 667.734213] env[65758]: value = "task-4660049" [ 667.734213] env[65758]: _type = "Task" [ 667.734213] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.743636] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660049, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.897993] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52cd596b-3a6e-0e10-8c0d-8bd79f0e19c3, 'name': SearchDatastore_Task, 'duration_secs': 0.011957} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.898300] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.898554] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 24016efd-cdb3-4c1e-9c08-8643400e729e/24016efd-cdb3-4c1e-9c08-8643400e729e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 667.898829] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f591b08-929a-4696-b7b9-d2244cac5529 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.906704] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 667.906704] env[65758]: value = "task-4660050" [ 667.906704] env[65758]: _type = "Task" [ 667.906704] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.915572] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660050, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.130433] env[65758]: WARNING neutronclient.v2_0.client [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 668.131200] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 668.131550] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 668.157108] env[65758]: WARNING neutronclient.v2_0.client [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 668.157956] env[65758]: WARNING openstack [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 668.158400] env[65758]: WARNING openstack [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 668.180976] env[65758]: DEBUG oslo_vmware.api [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660048, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.189667] env[65758]: DEBUG nova.compute.utils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 668.191044] env[65758]: DEBUG nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 668.192116] env[65758]: DEBUG nova.network.neutron [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 668.192116] env[65758]: WARNING neutronclient.v2_0.client [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 668.192116] env[65758]: WARNING neutronclient.v2_0.client [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 668.192520] env[65758]: WARNING openstack [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 668.192904] env[65758]: WARNING openstack [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 668.202941] env[65758]: DEBUG nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 668.212433] env[65758]: INFO nova.compute.manager [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Took 38.52 seconds to build instance. [ 668.249738] env[65758]: INFO nova.compute.manager [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Took 36.49 seconds to build instance. [ 668.255134] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660049, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.419726] env[65758]: DEBUG nova.network.neutron [-] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 668.421772] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660050, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.485665] env[65758]: DEBUG nova.network.neutron [-] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 668.685038] env[65758]: DEBUG oslo_vmware.api [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660048, 'name': PowerOffVM_Task, 'duration_secs': 0.924441} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.685038] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.685266] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 668.685614] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19096bbf-edaf-4205-a0af-49a976a12372 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.689962] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3cd4b323-1d4e-4f29-a64b-2eea8900f797 tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.695074] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.936s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.697305] env[65758]: INFO nova.compute.claims [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.718042] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca385d76-d8d6-4019-a739-df36d88dfb55 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 42.625s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.758695] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eaa22ba6-65f5-44d0-a98f-de0d71afa885 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "0addcbb1-3561-4c93-b714-37e6b613b962" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 38.961s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.759141] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660049, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.784030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 668.784030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 668.784030] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Deleting the datastore file [datastore2] 83b637d8-b9fa-4159-b879-c1d737871539 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 668.784030] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8e69697-ca65-404a-8bb3-57958e953867 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.788619] env[65758]: DEBUG nova.network.neutron [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Successfully created port: 13ded2e1-9fb6-4eed-b82c-82509e35b8fb {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 668.795811] env[65758]: DEBUG oslo_vmware.api [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for the task: (returnval){ [ 668.795811] env[65758]: value = "task-4660052" [ 668.795811] env[65758]: _type = "Task" [ 668.795811] env[65758]:
} to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.811701] env[65758]: DEBUG oslo_vmware.api [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660052, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.923362] env[65758]: INFO nova.compute.manager [-] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Took 3.12 seconds to deallocate network for instance. [ 668.924970] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63829} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.929022] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 24016efd-cdb3-4c1e-9c08-8643400e729e/24016efd-cdb3-4c1e-9c08-8643400e729e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 668.929022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 668.929998] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6acb0957-d533-4995-9b2a-93fb07dbae62 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.939994] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 668.939994] env[65758]: value = "task-4660053" [ 668.939994] env[65758]: _type = "Task" [ 668.939994] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.949793] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660053, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.990506] env[65758]: INFO nova.compute.manager [-] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Took 3.02 seconds to deallocate network for instance. 
[ 669.223610] env[65758]: DEBUG nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 669.258344] env[65758]: DEBUG oslo_vmware.api [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Task: {'id': task-4660049, 'name': PowerOnVM_Task, 'duration_secs': 1.355746} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.258922] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 669.259266] env[65758]: DEBUG nova.compute.manager [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 669.262662] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dae1237-77d8-4198-ae6d-ce2df00ebfd9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.292834] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 669.294328] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 669.294328] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 669.294328] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 
tempest-ServersAdminTestJSON-929686250-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 669.294328] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 669.294505] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 669.294844] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 669.295087] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 669.295353] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 669.295602] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 669.295859] env[65758]: DEBUG nova.virt.hardware [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 669.297295] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b8f095-f1c3-4f68-9b3b-819d23e9ffec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.335768] env[65758]: DEBUG oslo_vmware.api [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660052, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.340739] env[65758]: DEBUG nova.policy [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3a79fbdbdc4294a30f87eabe5719de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9aaf5b39abda42f28a847d5fe0d0ecec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 669.345035] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39b0e98-5729-4af4-8a7e-5605ead33461 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.435356] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.458187] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660053, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.251005} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.458187] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 669.458522] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04bf7cd-948e-4ec7-885c-3648a0875ed7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.483698] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] 24016efd-cdb3-4c1e-9c08-8643400e729e/24016efd-cdb3-4c1e-9c08-8643400e729e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 669.484122] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3076e057-1761-4e13-98ff-d9d798ae47ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.502350] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.510726] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 669.510726] env[65758]: value = "task-4660054" [ 669.510726] env[65758]: _type = "Task" [ 669.510726] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.523993] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660054, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.812314] env[65758]: DEBUG oslo_vmware.api [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Task: {'id': task-4660052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.622878} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.813513] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 669.813705] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 669.813878] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 669.814056] env[65758]: INFO nova.compute.manager [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Took 2.18 seconds to destroy the instance on the hypervisor. [ 669.814421] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 669.817077] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.817311] env[65758]: DEBUG nova.compute.manager [-] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 669.817406] env[65758]: DEBUG nova.network.neutron [-] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 669.817622] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 669.818142] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 669.818399] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 670.030020] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660054, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.133462] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a60d6e-b870-4dc6-b240-feed6b4337e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.142901] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa12b56-79c0-499f-95d8-e30ff318741e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.181898] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f33312-a416-4737-87bc-505dce29b7e5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.192133] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b155a031-245e-456a-9fee-3d16a8106e38 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.209546] env[65758]: DEBUG nova.compute.provider_tree [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.526963] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660054, 'name': ReconfigVM_Task, 'duration_secs': 0.692147} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.526963] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Reconfigured VM instance instance-00000018 to attach disk [datastore2] 24016efd-cdb3-4c1e-9c08-8643400e729e/24016efd-cdb3-4c1e-9c08-8643400e729e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 670.526963] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-85f3bd36-fe83-44c7-bac6-7390fcb683e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.534413] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 670.534413] env[65758]: value = "task-4660055" [ 670.534413] env[65758]: _type = "Task" [ 670.534413] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.546933] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660055, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.713677] env[65758]: DEBUG nova.scheduler.client.report [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.869574] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: a2010738-759b-480a-8360-2639788056b1] Updated VIF entry in instance network info cache for port acfd4b22-8660-461e-9e71-fa7134a7936e. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 670.869984] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: a2010738-759b-480a-8360-2639788056b1] Updating instance_info_cache with network_info: [{"id": "acfd4b22-8660-461e-9e71-fa7134a7936e", "address": "fa:16:3e:cd:71:71", "network": {"id": "b8727d6d-5390-46a5-8f17-d072022f93b9", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-137815059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a13c957f3f4521ba7862cf3de6c0c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacfd4b22-86", "ovs_interfaceid": "acfd4b22-8660-461e-9e71-fa7134a7936e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 670.922783] env[65758]: DEBUG nova.network.neutron [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Updated VIF entry in instance network info cache for port 026141b1-3811-4baa-8195-d418fa316270. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 670.924886] env[65758]: DEBUG nova.network.neutron [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Updating instance_info_cache with network_info: [{"id": "d0a812a9-0121-493b-92c4-16221a927a6a", "address": "fa:16:3e:92:b8:aa", "network": {"id": "5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a812a9-01", "ovs_interfaceid": "d0a812a9-0121-493b-92c4-16221a927a6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "026141b1-3811-4baa-8195-d418fa316270", "address": "fa:16:3e:f7:60:d4", "network": {"id": "bfd8cc95-fa08-4bc9-976c-adfbd4c45ea9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2004041920", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.115", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap026141b1-38", "ovs_interfaceid": "026141b1-3811-4baa-8195-d418fa316270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "acba5b06-e536-4848-be4e-db877af4d6ac", "address": "fa:16:3e:11:b1:da", "network": {"id": "5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", 
"segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacba5b06-e5", "ovs_interfaceid": "acba5b06-e536-4848-be4e-db877af4d6ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 670.930273] env[65758]: DEBUG nova.network.neutron [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Successfully updated port: 13ded2e1-9fb6-4eed-b82c-82509e35b8fb {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 671.045874] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660055, 'name': Rename_Task, 'duration_secs': 0.206169} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.046604] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 671.047104] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49b5235f-aa85-4f0b-962a-4afa2eacb641 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.057451] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 671.057451] env[65758]: value = "task-4660056" [ 671.057451] env[65758]: _type = "Task" [ 671.057451] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.067605] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660056, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.220508] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.221143] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 671.224142] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.917s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.226428] env[65758]: INFO nova.compute.claims [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 671.234205] env[65758]: DEBUG nova.network.neutron [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Successfully created port: fc47a856-bb57-45b8-986b-bc9bcf87abe6 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 671.314308] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 671.373778] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Releasing lock "refresh_cache-a2010738-759b-480a-8360-2639788056b1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.374073] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-vif-plugged-acba5b06-e536-4848-be4e-db877af4d6ac {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 671.374276] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Acquiring lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.374493] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.374752] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.374834] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] No waiting 
events found dispatching network-vif-plugged-acba5b06-e536-4848-be4e-db877af4d6ac {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 671.375107] env[65758]: WARNING nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received unexpected event network-vif-plugged-acba5b06-e536-4848-be4e-db877af4d6ac for instance with vm_state building and task_state spawning. [ 671.375193] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-changed-acba5b06-e536-4848-be4e-db877af4d6ac {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 671.375455] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Refreshing instance network info cache due to event network-changed-acba5b06-e536-4848-be4e-db877af4d6ac. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 671.375455] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Acquiring lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.430938] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Releasing lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.430938] env[65758]: DEBUG nova.compute.manager [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Received event network-vif-plugged-872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 671.430938] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Acquiring lock "0addcbb1-3561-4c93-b714-37e6b613b962-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.430938] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Lock "0addcbb1-3561-4c93-b714-37e6b613b962-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.430938] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Lock "0addcbb1-3561-4c93-b714-37e6b613b962-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.430938] env[65758]: DEBUG nova.compute.manager 
[req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] No waiting events found dispatching network-vif-plugged-872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 671.430938] env[65758]: WARNING nova.compute.manager [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Received unexpected event network-vif-plugged-872949b5-9bac-4f83-acec-93e23be464c5 for instance with vm_state building and task_state spawning. [ 671.430938] env[65758]: DEBUG nova.compute.manager [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Received event network-changed-872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 671.430938] env[65758]: DEBUG nova.compute.manager [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Refreshing instance network info cache due to event network-changed-872949b5-9bac-4f83-acec-93e23be464c5. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 671.435765] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Acquiring lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.435765] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Acquired lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.435765] env[65758]: DEBUG nova.network.neutron [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Refreshing network info cache for port 872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 671.435765] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Acquired lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.435765] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Refreshing network info cache for port acba5b06-e536-4848-be4e-db877af4d6ac {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 671.438155] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquiring lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.439281] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquired lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.439664] env[65758]: DEBUG nova.network.neutron [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 671.573484] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660056, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.731090] env[65758]: DEBUG nova.compute.utils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 671.736117] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 671.736117] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 671.736117] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 671.736638] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 671.737395] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 671.738012] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 671.946015] env[65758]: WARNING neutronclient.v2_0.client [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 671.946728] env[65758]: WARNING openstack [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 671.947814] env[65758]: WARNING openstack [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 671.960447] env[65758]: WARNING neutronclient.v2_0.client [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 671.961463] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 671.962019] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 671.977618] env[65758]: WARNING openstack [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 671.978159] env[65758]: WARNING openstack [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 672.071159] env[65758]: DEBUG oslo_vmware.api [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660056, 'name': PowerOnVM_Task, 'duration_secs': 0.753725} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.071159] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 672.071159] env[65758]: INFO nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Took 10.42 seconds to spawn the instance on the hypervisor. 
[ 672.071159] env[65758]: DEBUG nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 672.071945] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f540166-446a-4f67-9c05-934a707647da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.236684] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 672.596072] env[65758]: INFO nova.compute.manager [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Took 38.66 seconds to build instance. [ 672.738438] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a683df0-5f55-4c3b-9859-b8a5cfb35352 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.753094] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efd52ff-e530-40d3-8fe7-2132de3a01b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.792787] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d848fb-3852-4e8c-94ca-c37af68b3fd0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.801869] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3ae023-37ac-4dce-995c-a0eb7abb4b6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.817661] env[65758]: DEBUG nova.compute.provider_tree [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.891473] env[65758]: DEBUG nova.network.neutron [-] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 672.984031] env[65758]: DEBUG nova.policy [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b20745f0d074a908467780bdc3ef076', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc1b1a2357a4f34b1093150b27de587', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 673.103176] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca9d317e-438d-402b-b544-238e76e7ffce tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "24016efd-cdb3-4c1e-9c08-8643400e729e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.237s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.254337] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 673.285157] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 673.285430] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 673.285578] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 673.285837] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 673.285911] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 673.286016] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 
tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 673.286223] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 673.286375] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 673.286533] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 673.286687] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 673.286954] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 673.287833] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529a1992-ead6-42e2-9696-611676855b1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.300820] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476a620e-edb7-4efd-b933-1c87f52dc2eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.324813] env[65758]: DEBUG nova.scheduler.client.report [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 673.393636] env[65758]: INFO nova.compute.manager [-] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Took 3.58 seconds to deallocate network for instance. 
[ 673.458091] env[65758]: DEBUG nova.network.neutron [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Successfully updated port: fc47a856-bb57-45b8-986b-bc9bcf87abe6 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 673.653178] env[65758]: DEBUG nova.network.neutron [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 673.830346] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.830880] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 673.834560] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.943s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.835251] env[65758]: DEBUG nova.objects.instance [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 673.901810] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.961641] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "refresh_cache-9e007d55-0a5c-4469-a546-9b18e188bea0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.961846] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "refresh_cache-9e007d55-0a5c-4469-a546-9b18e188bea0" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.962016] env[65758]: DEBUG nova.network.neutron [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 674.272973] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.273385] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.341220] env[65758]: DEBUG nova.compute.utils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 674.341220] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 674.341857] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 674.342582] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 674.343054] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 674.343986] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 674.344509] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 674.468287] env[65758]: WARNING openstack [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 674.468287] env[65758]: WARNING openstack [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 674.778386] env[65758]: DEBUG nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 674.857269] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 674.862542] env[65758]: DEBUG oslo_concurrency.lockutils [None req-be4e74c5-aa22-4e05-954a-c23438755ba1 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.028s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.863971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.929s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.865818] env[65758]: INFO nova.compute.claims [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.909365] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Successfully created port: 67c82890-e746-45ff-9f1b-e905834b0064 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 674.946464] env[65758]: DEBUG nova.policy [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b20745f0d074a908467780bdc3ef076', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc1b1a2357a4f34b1093150b27de587', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 675.235825] env[65758]: DEBUG nova.network.neutron [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 675.306974] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.870253] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 675.908642] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 675.909484] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 675.909484] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 675.909884] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 675.910242] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 675.910619] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 675.911143] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 675.911686] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 675.911804] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 675.912486] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 675.912561] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 675.915424] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24789090-7eac-496e-b58e-e4cbecfccf6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.934154] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2659c88-e493-4912-a7c7-781b519d1418 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.399844] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Successfully created port: cd42e1b4-a8b0-4277-8fe9-1ea960c938ce {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 676.432431] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfa2bc3-ebcf-4b1f-9a4b-704e0902f6db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.442972] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6267170-f233-4a27-bfbc-e75d90e63dcf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.477934] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061e1783-e731-4a3a-8aff-c8572f43f6a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.486951] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0f8e76-1968-433f-b2f8-58d37d1d98e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.502066] env[65758]: DEBUG nova.compute.provider_tree [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.005550] env[65758]: DEBUG 
nova.scheduler.client.report [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 677.512036] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.512948] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 677.516639] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.387s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.518387] env[65758]: INFO nova.compute.claims [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.540283] env[65758]: WARNING neutronclient.v2_0.client [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
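The inventory dictionary reported above describes the provider's schedulable capacity: per resource class, the effective capacity is (total - reserved) * allocation_ratio, max_unit caps what one allocation may take, and "Inventory has not changed" means the comparison against the cached inventory succeeded so no update is sent to placement. A small sketch with the exact numbers from the log (helper names are illustrative, not Nova's):

# Provider inventory as reported above, verbatim.
INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "min_unit": 1, "max_unit": 16,
                  "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                  "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "min_unit": 1, "max_unit": 95,
                  "step_size": 1, "allocation_ratio": 1.0},
}

def effective_capacity(inv):
    # Schedulable amount per resource class: (total - reserved) * allocation_ratio.
    return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
            for rc, v in inv.items()}

def needs_update(cached, proposed):
    # "Inventory has not changed for provider ..." corresponds to this being False.
    return cached != proposed

print(effective_capacity(INVENTORY))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 200}
print(needs_update(INVENTORY, INVENTORY))
# False -> skip the write to placement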
[ 677.540283] env[65758]: WARNING openstack [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 677.540283] env[65758]: WARNING openstack [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 677.686821] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Successfully updated port: 67c82890-e746-45ff-9f1b-e905834b0064 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 677.975940] env[65758]: WARNING neutronclient.v2_0.client [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 677.976127] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 677.976488] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 678.006260] env[65758]: WARNING neutronclient.v2_0.client [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
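The recurring "Disabling service 'block-storage' / 'key-manager'" warnings above are driven by oslo_config.cfg.NoSuchOptError: the SDK tries to read valid_interfaces from the [cinder] and [barbican] groups, finds the option unregistered, and disables that service for the session. A minimal oslo.config reproduction of the mechanism follows; registering the option is shown only to illustrate why the lookup fails, not as the intended fix for this deployment.

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf([])  # no config files, empty command line

# The [cinder] group exists (some options are registered there), but
# valid_interfaces was never registered for it:
conf.register_opts([cfg.StrOpt("region_name")], group="cinder")
try:
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print("would disable 'block-storage':", exc)
    # -> no such option valid_interfaces in group [cinder]

# Once the option is registered for the group, the same lookup succeeds.
conf.register_opts(
    [cfg.ListOpt("valid_interfaces", default=["internal", "public"])],
    group="cinder")
print(conf.cinder.valid_interfaces)  # ['internal', 'public']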
[ 678.006260] env[65758]: WARNING openstack [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 678.006260] env[65758]: WARNING openstack [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 678.021348] env[65758]: DEBUG nova.compute.utils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 678.023218] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 678.023606] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 678.024037] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 678.024471] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
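The build flow traced above repeatedly interleaves "Start building networks asynchronously" / "Allocating IP information in the background" with block-device-mapping work, and only needs the network result when it starts spawning on the hypervisor. Nova does this with its own eventlet-based helpers; the sketch below is only a stand-in showing the same overlap pattern with concurrent.futures, using identifiers taken from the log as sample data.

import time
from concurrent.futures import ThreadPoolExecutor

def allocate_network(instance_uuid):
    # Stand-in for neutron allocate_for_instance(): create/update ports, return VIFs.
    time.sleep(0.2)
    return [{"port_id": "fc47a856-bb57-45b8-986b-bc9bcf87abe6",
             "address": "192.168.128.10"}]

def build_block_device_mappings(instance_uuid):
    # Stand-in for "Start building block device mappings for instance."
    time.sleep(0.1)
    return [{"device_name": "/dev/sda", "boot_index": 0}]

def build_and_run(instance_uuid):
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_network, instance_uuid)  # background allocation
        bdms = build_block_device_mappings(instance_uuid)         # overlaps with it
        network_info = nw_future.result()                         # block before spawn
    print("spawning", instance_uuid, "with", bdms, network_info)

build_and_run("03073968-e679-4ce5-9f84-c4765217b308")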
[ 678.029243] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 678.029243] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 678.192305] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "refresh_cache-492d1063-8eaf-4207-8d65-341fbc0b6c39" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.192305] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "refresh_cache-492d1063-8eaf-4207-8d65-341fbc0b6c39" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.192305] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 678.537024] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 678.696562] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 678.696995] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 678.843173] env[65758]: WARNING neutronclient.v2_0.client [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 678.843173] env[65758]: WARNING openstack [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 678.843173] env[65758]: WARNING openstack [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 678.951681] env[65758]: DEBUG nova.policy [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b20745f0d074a908467780bdc3ef076', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc1b1a2357a4f34b1093150b27de587', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 678.996500] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74dcdef-7651-4f43-93bb-526578800955 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.005156] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be0b901-f008-4828-9964-e87f592f3b5c {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.054542] env[65758]: DEBUG nova.network.neutron [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Updating instance_info_cache with network_info: [{"id": "13ded2e1-9fb6-4eed-b82c-82509e35b8fb", "address": "fa:16:3e:5b:e3:fa", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13ded2e1-9f", "ovs_interfaceid": "13ded2e1-9fb6-4eed-b82c-82509e35b8fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 679.056174] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e0900a-62d9-4984-92f7-afe3c52524ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.065911] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57819adb-6372-4d72-bd2a-8476443358f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.081412] env[65758]: DEBUG nova.compute.provider_tree [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.105820] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 679.249620] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Successfully updated port: cd42e1b4-a8b0-4277-8fe9-1ea960c938ce {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 679.339209] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Updated VIF entry in instance network info cache for port acba5b06-e536-4848-be4e-db877af4d6ac. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 679.340092] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Updating instance_info_cache with network_info: [{"id": "d0a812a9-0121-493b-92c4-16221a927a6a", "address": "fa:16:3e:92:b8:aa", "network": {"id": "5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a812a9-01", "ovs_interfaceid": "d0a812a9-0121-493b-92c4-16221a927a6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "026141b1-3811-4baa-8195-d418fa316270", "address": "fa:16:3e:f7:60:d4", "network": {"id": "bfd8cc95-fa08-4bc9-976c-adfbd4c45ea9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2004041920", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.115", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap026141b1-38", "ovs_interfaceid": "026141b1-3811-4baa-8195-d418fa316270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "acba5b06-e536-4848-be4e-db877af4d6ac", "address": "fa:16:3e:11:b1:da", "network": {"id": 
"5da5cef5-60bb-4223-87ae-38b2191495b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-760279645", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacba5b06-e5", "ovs_interfaceid": "acba5b06-e536-4848-be4e-db877af4d6ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 679.377512] env[65758]: DEBUG nova.network.neutron [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Updated VIF entry in instance network info cache for port 872949b5-9bac-4f83-acec-93e23be464c5. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 679.378255] env[65758]: DEBUG nova.network.neutron [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Updating instance_info_cache with network_info: [{"id": "872949b5-9bac-4f83-acec-93e23be464c5", "address": "fa:16:3e:f8:7d:e0", "network": {"id": "115e8c49-6d73-405e-a185-9072fb560eb2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1829868231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ca73ea9954543e38b16a12b37d531c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap872949b5-9b", "ovs_interfaceid": "872949b5-9bac-4f83-acec-93e23be464c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 679.539500] env[65758]: DEBUG nova.compute.manager [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Received event network-vif-plugged-8c662ce6-206b-49ce-836c-0bbc9792f182 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 679.539816] env[65758]: DEBUG oslo_concurrency.lockutils [req-4aea981b-a3dd-45e4-a225-f45f45485afe 
req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Acquiring lock "24016efd-cdb3-4c1e-9c08-8643400e729e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.540104] env[65758]: DEBUG oslo_concurrency.lockutils [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Lock "24016efd-cdb3-4c1e-9c08-8643400e729e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.540264] env[65758]: DEBUG oslo_concurrency.lockutils [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Lock "24016efd-cdb3-4c1e-9c08-8643400e729e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.540473] env[65758]: DEBUG nova.compute.manager [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] No waiting events found dispatching network-vif-plugged-8c662ce6-206b-49ce-836c-0bbc9792f182 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 679.540764] env[65758]: WARNING nova.compute.manager [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Received unexpected event network-vif-plugged-8c662ce6-206b-49ce-836c-0bbc9792f182 for instance with vm_state active and task_state None. [ 679.541669] env[65758]: DEBUG nova.compute.manager [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Received event network-changed-8c662ce6-206b-49ce-836c-0bbc9792f182 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 679.541669] env[65758]: DEBUG nova.compute.manager [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Refreshing instance network info cache due to event network-changed-8c662ce6-206b-49ce-836c-0bbc9792f182. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 679.541669] env[65758]: DEBUG oslo_concurrency.lockutils [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Acquiring lock "refresh_cache-24016efd-cdb3-4c1e-9c08-8643400e729e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.541935] env[65758]: DEBUG oslo_concurrency.lockutils [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Acquired lock "refresh_cache-24016efd-cdb3-4c1e-9c08-8643400e729e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.542535] env[65758]: DEBUG nova.network.neutron [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Refreshing network info cache for port 8c662ce6-206b-49ce-836c-0bbc9792f182 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 679.555029] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 679.561029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Releasing lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.561468] env[65758]: DEBUG nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Instance network_info: |[{"id": "13ded2e1-9fb6-4eed-b82c-82509e35b8fb", "address": "fa:16:3e:5b:e3:fa", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13ded2e1-9f", "ovs_interfaceid": "13ded2e1-9fb6-4eed-b82c-82509e35b8fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 679.562138] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 
tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:e3:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13ded2e1-9fb6-4eed-b82c-82509e35b8fb', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 679.570753] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Creating folder: Project (2030cc491d604d46bda3753f5a3485a5). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 679.572496] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9937f0e-565d-4d24-9e02-cb18f73bb755 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.586043] env[65758]: DEBUG nova.scheduler.client.report [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.593900] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Created folder: Project (2030cc491d604d46bda3753f5a3485a5) in parent group-v909763. [ 679.594312] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Creating folder: Instances. Parent ref: group-v909833. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 679.594728] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e10550ba-ab74-45b3-ace6-a9cbaa7cc94d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.601633] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 679.601633] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 679.601633] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 679.601633] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 679.601633] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 679.602013] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 679.602013] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 679.602167] env[65758]: DEBUG nova.virt.hardware [None 
req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 679.602443] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 679.602633] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 679.602800] env[65758]: DEBUG nova.virt.hardware [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 679.603835] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa9b0ab-5e80-4fc3-8b6b-24d7740b3a65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.608951] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Created folder: Instances in parent group-v909833. [ 679.609247] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 679.609927] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 679.610245] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20eef9f3-72a6-4bc3-bcc0-f69bef3a04b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.632813] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d9c98d-ada0-4d86-a14c-1f8ab58db2e1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.641180] env[65758]: DEBUG nova.network.neutron [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Updating instance_info_cache with network_info: [{"id": "fc47a856-bb57-45b8-986b-bc9bcf87abe6", "address": "fa:16:3e:cc:22:d7", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc47a856-bb", "ovs_interfaceid": "fc47a856-bb57-45b8-986b-bc9bcf87abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 679.642547] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 679.642547] env[65758]: value = "task-4660059" [ 679.642547] env[65758]: _type = "Task" [ 679.642547] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.665602] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660059, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.703627] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Successfully created port: 533485bf-4e5b-467a-a80c-4e9867e7efbe {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 679.755027] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "refresh_cache-03073968-e679-4ce5-9f84-c4765217b308" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.756792] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "refresh_cache-03073968-e679-4ce5-9f84-c4765217b308" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.756792] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 679.824649] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
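The CreateVM_Task records above show oslo.vmware submitting the folder/VM creation call and then polling the returned vCenter task ("progress is 6%") until it completes. The loop below is a generic poll-until-done sketch in the same spirit, not oslo.vmware's implementation; the fetch_task_state callable and its return shape are assumptions for illustration.

import time

def wait_for_task(task_id, fetch_task_state, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = fetch_task_state(task_id)  # e.g. {'state': 'running', 'progress': 6}
        if state["state"] == "success":
            return state.get("result")
        if state["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {state.get('error')}")
        print(f"Task: {{'id': '{task_id}'}} progress is {state.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

# Example with a canned sequence of states standing in for repeated API polls:
_states = iter([{"state": "running", "progress": 6},
                {"state": "running", "progress": 60},
                {"state": "success", "result": "vm-12345"}])
print(wait_for_task("task-4660059", lambda _tid: next(_states), interval=0))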
[ 679.825332] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 679.825755] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 679.844588] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Releasing lock "refresh_cache-483765b5-c63c-4aac-9082-519bbc4e6eb5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.844691] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Received event network-changed-0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 679.844771] env[65758]: DEBUG nova.compute.manager [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Refreshing instance network info cache due to event network-changed-0e626ecf-0686-4626-9e0c-31a51751b185. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 679.844990] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Acquiring lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.845136] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Acquired lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.845289] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Refreshing network info cache for port 0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 679.883111] env[65758]: DEBUG oslo_concurrency.lockutils [req-13d12630-d208-405a-ab67-07dfbe41d8b6 req-8eaa9067-0143-4e70-8cd9-4352c874f152 service nova] Releasing lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.047112] env[65758]: WARNING neutronclient.v2_0.client [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 680.047817] env[65758]: WARNING openstack [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 680.048191] env[65758]: WARNING openstack [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 680.096538] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.097561] env[65758]: DEBUG nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 680.100264] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.022s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.102076] env[65758]: INFO nova.compute.claims [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.143971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "refresh_cache-9e007d55-0a5c-4469-a546-9b18e188bea0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.143971] env[65758]: DEBUG nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Instance network_info: |[{"id": "fc47a856-bb57-45b8-986b-bc9bcf87abe6", "address": "fa:16:3e:cc:22:d7", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc47a856-bb", "ovs_interfaceid": "fc47a856-bb57-45b8-986b-bc9bcf87abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 680.144584] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:22:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc47a856-bb57-45b8-986b-bc9bcf87abe6', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.153021] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 
tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 680.157257] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.157558] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cc48367-f1b1-431e-b288-a1e2c658eec9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.178881] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660059, 'name': CreateVM_Task, 'duration_secs': 0.370111} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.180628] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 680.180887] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.180887] env[65758]: value = "task-4660060" [ 680.180887] env[65758]: _type = "Task" [ 680.180887] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.181461] env[65758]: WARNING neutronclient.v2_0.client [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 680.181851] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.182020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.182370] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 680.187194] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-055d1e06-7bb9-43ce-afd2-46c4adce5961 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.196305] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 680.196305] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d58550-9b68-3aca-e469-3ce3bd9e795e" [ 680.196305] env[65758]: _type = "Task" [ 680.196305] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.200443] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660060, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.209923] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d58550-9b68-3aca-e469-3ce3bd9e795e, 'name': SearchDatastore_Task, 'duration_secs': 0.010798} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.210246] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.210766] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 680.210766] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.210942] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.211031] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 680.211615] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f38982fe-10ed-4fe9-95a9-f40f8e046f72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.220439] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 680.220751] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 680.221395] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57e45226-fd67-4bc6-bf87-317fd06d621c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.227583] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 680.227583] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52eb8886-2474-27ac-e4f5-c01b747d8a15" [ 680.227583] env[65758]: _type = "Task" [ 680.227583] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.239755] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52eb8886-2474-27ac-e4f5-c01b747d8a15, 'name': SearchDatastore_Task, 'duration_secs': 0.009742} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.240570] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0b8e030-79a8-4e99-801a-f9e83cee3719 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.247332] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 680.247332] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5219c9ed-d427-a861-9523-cc75bb7e831e" [ 680.247332] env[65758]: _type = "Task" [ 680.247332] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.256862] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5219c9ed-d427-a861-9523-cc75bb7e831e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.260438] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 680.260438] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 680.348400] env[65758]: WARNING neutronclient.v2_0.client [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 680.349242] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 680.349609] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 680.610521] env[65758]: DEBUG nova.compute.utils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 680.615257] env[65758]: DEBUG nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 680.615505] env[65758]: DEBUG nova.network.neutron [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 680.615873] env[65758]: WARNING neutronclient.v2_0.client [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 680.616275] env[65758]: WARNING neutronclient.v2_0.client [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 680.616920] env[65758]: WARNING openstack [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 680.617408] env[65758]: WARNING openstack [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 680.693428] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660060, 'name': CreateVM_Task, 'duration_secs': 0.394535} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.693670] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 680.694228] env[65758]: WARNING neutronclient.v2_0.client [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 680.695613] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.695806] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.696142] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 680.696426] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9831ba27-5e27-47dc-af02-b1e26e67cfae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.703179] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 680.703179] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a5169e-ecea-94f4-79a8-2cae8ccbd738" [ 680.703179] env[65758]: _type = "Task" [ 680.703179] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.714374] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a5169e-ecea-94f4-79a8-2cae8ccbd738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.761804] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5219c9ed-d427-a861-9523-cc75bb7e831e, 'name': SearchDatastore_Task, 'duration_secs': 0.012431} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.762293] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.762682] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 8eb65797-072b-4a7e-853d-26c0adc51bb2/8eb65797-072b-4a7e-853d-26c0adc51bb2.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 680.763079] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7850b192-acff-441c-aa85-52c8b052a3e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.773228] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 680.773228] env[65758]: value = "task-4660061" [ 680.773228] env[65758]: _type = "Task" [ 680.773228] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.784489] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660061, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.961420] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Updating instance_info_cache with network_info: [{"id": "67c82890-e746-45ff-9f1b-e905834b0064", "address": "fa:16:3e:21:f7:12", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c82890-e7", "ovs_interfaceid": "67c82890-e746-45ff-9f1b-e905834b0064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 681.014421] env[65758]: DEBUG nova.compute.manager [req-cf354f11-0ddd-42dd-aaec-23e5bbe57fb3 req-dc37fb53-ad4d-4b0d-b650-b0393c7ad0a5 service nova] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Received event network-vif-deleted-b3d6b993-f74a-48db-b23d-102e47b4f09b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 681.038881] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 681.116208] env[65758]: DEBUG nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 681.149187] env[65758]: DEBUG nova.policy [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '04adba2b8a6c4950be02ba0f07bf1431', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df8b333d08c54634b617dd8284143beb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 681.224081] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a5169e-ecea-94f4-79a8-2cae8ccbd738, 'name': SearchDatastore_Task, 'duration_secs': 0.011058} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.227642] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.227894] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.228155] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.228305] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.228548] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.229193] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f477e83-a6bb-4916-a704-4eb1ec38d547 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 681.249667] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.250282] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.250745] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3f1c337-4b4b-47cc-976e-3c533a0dd285 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.259129] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 681.259129] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d88c5e-eb2c-64db-1394-da97f166e831" [ 681.259129] env[65758]: _type = "Task" [ 681.259129] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.276745] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d88c5e-eb2c-64db-1394-da97f166e831, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.297076] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660061, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519508} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.298258] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 8eb65797-072b-4a7e-853d-26c0adc51bb2/8eb65797-072b-4a7e-853d-26c0adc51bb2.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 681.298414] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 681.298557] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-befa989e-f723-46cd-88be-0169ddc164da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.308919] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 681.308919] env[65758]: value = "task-4660062" [ 681.308919] env[65758]: _type = "Task" [ 681.308919] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.320936] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660062, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.412795] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Successfully updated port: 533485bf-4e5b-467a-a80c-4e9867e7efbe {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 681.466661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock "refresh_cache-492d1063-8eaf-4207-8d65-341fbc0b6c39" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.467009] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Instance network_info: |[{"id": "67c82890-e746-45ff-9f1b-e905834b0064", "address": "fa:16:3e:21:f7:12", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c82890-e7", "ovs_interfaceid": "67c82890-e746-45ff-9f1b-e905834b0064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 681.467671] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:f7:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67c82890-e746-45ff-9f1b-e905834b0064', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 681.476156] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Creating folder: Project (efc1b1a2357a4f34b1093150b27de587). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 681.476952] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43d0eacd-c630-4bba-9f99-c1d6d39eb271 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.496824] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Created folder: Project (efc1b1a2357a4f34b1093150b27de587) in parent group-v909763. [ 681.497216] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Creating folder: Instances. Parent ref: group-v909837. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 681.506929] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8edbeaca-09c3-4774-8a56-386b3e787ea3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.527101] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Created folder: Instances in parent group-v909837. [ 681.527392] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 681.530503] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 681.530931] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf5e61be-fb59-4d5b-949a-4a9aecedec39 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.565458] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 681.565458] env[65758]: value = "task-4660065" [ 681.565458] env[65758]: _type = "Task" [ 681.565458] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.578109] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660065, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.587449] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Acquiring lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.587686] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.648653] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dbe949-7b14-454e-b392-fc51fe659274 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.660230] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efc0fd9-769e-45af-b913-337902161ca5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.698856] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fc44bb-4a2d-4893-802c-adf9f06ad292 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.711619] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c38e61-f2c8-4100-918d-4b2c24635ca1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.734842] env[65758]: DEBUG nova.compute.provider_tree [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.775476] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d88c5e-eb2c-64db-1394-da97f166e831, 'name': SearchDatastore_Task, 'duration_secs': 0.020623} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.776468] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5e4069f-9fb4-454e-b782-5dd3b9fa95ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.783563] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 681.783563] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52115dcf-020a-1d46-ae45-e52d1168357b" [ 681.783563] env[65758]: _type = "Task" [ 681.783563] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.794947] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52115dcf-020a-1d46-ae45-e52d1168357b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.820073] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069434} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.820073] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 681.820858] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eeed30-3b31-4df4-92ed-248ea7b3dafc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.845558] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 8eb65797-072b-4a7e-853d-26c0adc51bb2/8eb65797-072b-4a7e-853d-26c0adc51bb2.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 681.849085] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0fc962f-ac30-4274-8d2a-05dedaba8cb0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.865277] env[65758]: WARNING neutronclient.v2_0.client [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 681.865953] env[65758]: WARNING openstack [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 681.866328] env[65758]: WARNING openstack [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 681.874757] env[65758]: DEBUG nova.network.neutron [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Successfully created port: 3e23d45e-b849-47dd-9649-500080939b87 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 681.878258] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 681.878839] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 681.879242] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 681.893947] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 681.893947] env[65758]: value = "task-4660066" [ 681.893947] env[65758]: _type = "Task" [ 681.893947] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.904811] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660066, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.917739] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "refresh_cache-148eddf4-4c01-47bc-be81-451ca57e7347" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.917739] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "refresh_cache-148eddf4-4c01-47bc-be81-451ca57e7347" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.917739] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 682.078100] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660065, 'name': CreateVM_Task, 'duration_secs': 0.391136} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.078402] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 682.079477] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 682.079607] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.079670] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.079999] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 682.080343] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21607d74-8209-4090-b9f2-29a8d7d1e4b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.086922] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 682.086922] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f1cf4f-84ad-e8f0-5a1d-130f2fbbdcf4" [ 682.086922] env[65758]: _type = "Task" [ 682.086922] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.098641] env[65758]: DEBUG nova.compute.utils [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 682.101034] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f1cf4f-84ad-e8f0-5a1d-130f2fbbdcf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.132209] env[65758]: DEBUG nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 682.164411] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 682.165155] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.165471] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 682.165896] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.166194] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 682.166464] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 682.166800] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 682.167085] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 682.167386] env[65758]: DEBUG nova.virt.hardware [None 
req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 682.167742] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 682.168061] env[65758]: DEBUG nova.virt.hardware [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 682.169170] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d138363-9371-4225-82ba-9fa4978514d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.178825] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c3bc15-84bd-4caa-8b27-affc018953da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.217661] env[65758]: WARNING neutronclient.v2_0.client [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 682.219030] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 682.220120] env[65758]: WARNING openstack [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 682.238663] env[65758]: DEBUG nova.scheduler.client.report [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.255624] env[65758]: DEBUG nova.network.neutron [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Updated VIF entry in instance network info cache for port 8c662ce6-206b-49ce-836c-0bbc9792f182. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 682.255954] env[65758]: DEBUG nova.network.neutron [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Updating instance_info_cache with network_info: [{"id": "8c662ce6-206b-49ce-836c-0bbc9792f182", "address": "fa:16:3e:60:f6:9a", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c662ce6-20", "ovs_interfaceid": "8c662ce6-206b-49ce-836c-0bbc9792f182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 682.295134] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52115dcf-020a-1d46-ae45-e52d1168357b, 'name': SearchDatastore_Task, 'duration_secs': 0.010531} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.297889] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Updating instance_info_cache with network_info: [{"id": "cd42e1b4-a8b0-4277-8fe9-1ea960c938ce", "address": "fa:16:3e:05:01:3d", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd42e1b4-a8", "ovs_interfaceid": "cd42e1b4-a8b0-4277-8fe9-1ea960c938ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 682.299242] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.299511] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 9e007d55-0a5c-4469-a546-9b18e188bea0/9e007d55-0a5c-4469-a546-9b18e188bea0.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.299990] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8acd7f21-b168-4b6e-899a-220ba666907c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.309572] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 682.309572] env[65758]: value = "task-4660067" [ 682.309572] env[65758]: _type = "Task" [ 682.309572] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.320362] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.373250] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Updated VIF entry in instance network info cache for port 0e626ecf-0686-4626-9e0c-31a51751b185. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 682.373609] env[65758]: DEBUG nova.network.neutron [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Updating instance_info_cache with network_info: [{"id": "0e626ecf-0686-4626-9e0c-31a51751b185", "address": "fa:16:3e:60:c5:ef", "network": {"id": "3770aad6-39a0-41da-84d1-b6aa69c0dfad", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-982589002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45aad313d10447e9ba61ed0a05b915ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e626ecf-06", "ovs_interfaceid": "0e626ecf-0686-4626-9e0c-31a51751b185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 682.407672] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660066, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.420367] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 682.420824] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 682.487980] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 682.599384] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f1cf4f-84ad-e8f0-5a1d-130f2fbbdcf4, 'name': SearchDatastore_Task, 'duration_secs': 0.018917} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.599694] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.600035] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 682.600304] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.600458] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.600636] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 682.600909] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d7b0fe9-496d-4ebe-8636-c1e39f2210db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.603276] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.016s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.611694] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 682.611771] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 682.612549] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20e432df-b585-4cdf-9763-cc1d4b607447 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.619050] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 682.619050] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5233bf75-e675-097c-2ea4-f3def93be04f" [ 682.619050] env[65758]: _type = "Task" [ 682.619050] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.628659] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5233bf75-e675-097c-2ea4-f3def93be04f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.738140] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 682.738824] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 682.739230] env[65758]: WARNING openstack [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 682.748369] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.748890] env[65758]: DEBUG nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 682.752089] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.355s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.752209] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.752358] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 682.752640] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.318s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.752953] env[65758]: DEBUG nova.objects.instance [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lazy-loading 'resources' on Instance uuid 28ccc013-962d-4607-83a2-5fcd480c27b2 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 682.755159] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaef513-1669-4f59-b797-cfcd29604146 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.758495] env[65758]: DEBUG oslo_concurrency.lockutils [req-4aea981b-a3dd-45e4-a225-f45f45485afe req-4f517454-20bb-420f-9ecc-14b4f50bac1e service nova] Releasing lock "refresh_cache-24016efd-cdb3-4c1e-9c08-8643400e729e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.767012] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910dd6dd-e212-404d-a2b4-bebac7d460a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.784572] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0862b374-45b5-4663-9764-46fb10281c8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.793163] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51735af9-1e1c-4330-99e0-ac6941dc3dfb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.801267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock 
"refresh_cache-03073968-e679-4ce5-9f84-c4765217b308" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.801645] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Instance network_info: |[{"id": "cd42e1b4-a8b0-4277-8fe9-1ea960c938ce", "address": "fa:16:3e:05:01:3d", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd42e1b4-a8", "ovs_interfaceid": "cd42e1b4-a8b0-4277-8fe9-1ea960c938ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 682.827288] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:01:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd42e1b4-a8b0-4277-8fe9-1ea960c938ce', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.836996] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 682.838422] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179393MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 682.838594] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.839105] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 682.843320] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf211b98-89a5-49dc-b5f1-66ea3ec94234 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.871606] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660067, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.873270] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.873270] env[65758]: value = "task-4660068" [ 682.873270] env[65758]: _type = "Task" [ 682.873270] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.877394] env[65758]: DEBUG oslo_concurrency.lockutils [req-9f840746-e94a-4d9d-a833-6446dc388010 req-003482f4-e42b-455a-9e78-1aba125f12cf service nova] Releasing lock "refresh_cache-1e249ca9-a7a8-440f-832b-a8f5d84ada8b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.882699] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660068, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.909803] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660066, 'name': ReconfigVM_Task, 'duration_secs': 0.589085} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.910332] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 8eb65797-072b-4a7e-853d-26c0adc51bb2/8eb65797-072b-4a7e-853d-26c0adc51bb2.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 682.910945] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2da69d9f-51e8-4281-bfa8-d2e0e07876fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.921223] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 682.921223] env[65758]: value = "task-4660069" [ 682.921223] env[65758]: _type = "Task" [ 682.921223] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.935454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.935758] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.937475] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660069, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.132038] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5233bf75-e675-097c-2ea4-f3def93be04f, 'name': SearchDatastore_Task, 'duration_secs': 0.012208} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.132978] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-527895ad-c3af-47d6-a133-8c846a752b34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.139691] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 683.139691] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524e3db4-295a-0cc6-0b98-2384bbb403e2" [ 683.139691] env[65758]: _type = "Task" [ 683.139691] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.150352] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524e3db4-295a-0cc6-0b98-2384bbb403e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.256207] env[65758]: DEBUG nova.compute.utils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 683.257768] env[65758]: DEBUG nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 683.258028] env[65758]: DEBUG nova.network.neutron [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 683.259338] env[65758]: WARNING neutronclient.v2_0.client [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 683.260378] env[65758]: WARNING neutronclient.v2_0.client [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 683.260483] env[65758]: WARNING openstack [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 683.264283] env[65758]: WARNING openstack [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 683.355013] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660067, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.772306} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.355410] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 9e007d55-0a5c-4469-a546-9b18e188bea0/9e007d55-0a5c-4469-a546-9b18e188bea0.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.355642] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.356045] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5efd4fb9-df22-40db-87b7-a820c4821949 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.367383] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 683.367383] env[65758]: value = "task-4660070" [ 683.367383] env[65758]: _type = "Task" [ 683.367383] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.381986] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660070, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.388628] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660068, 'name': CreateVM_Task, 'duration_secs': 0.426127} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.392516] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.392516] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 683.392896] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.392896] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.393224] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 683.393381] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-220475e7-33bd-42f0-af09-abe8cb9b15d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.398727] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 683.398727] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52515f55-3a0c-1d78-34ff-d7c5eded4b63" [ 683.398727] env[65758]: _type = "Task" [ 683.398727] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.410987] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52515f55-3a0c-1d78-34ff-d7c5eded4b63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.433423] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660069, 'name': Rename_Task, 'duration_secs': 0.256567} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.433763] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 683.434314] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa3cdde2-6ebb-4587-8d34-8fa65fce12a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.438225] env[65758]: DEBUG nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 683.446551] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 683.446551] env[65758]: value = "task-4660071" [ 683.446551] env[65758]: _type = "Task" [ 683.446551] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.457497] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660071, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.467838] env[65758]: DEBUG nova.network.neutron [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Updating instance_info_cache with network_info: [{"id": "533485bf-4e5b-467a-a80c-4e9867e7efbe", "address": "fa:16:3e:28:16:66", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap533485bf-4e", "ovs_interfaceid": "533485bf-4e5b-467a-a80c-4e9867e7efbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 683.553114] env[65758]: DEBUG nova.network.neutron [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Successfully updated port: 3e23d45e-b849-47dd-9649-500080939b87 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 683.600769] env[65758]: DEBUG nova.policy [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a483acce57cc46188137a06750082635', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c18712cd526b4e25b07140cb554b04d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 683.652999] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524e3db4-295a-0cc6-0b98-2384bbb403e2, 'name': SearchDatastore_Task, 'duration_secs': 0.017244} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.653381] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.653745] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 492d1063-8eaf-4207-8d65-341fbc0b6c39/492d1063-8eaf-4207-8d65-341fbc0b6c39.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 683.654038] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39421cc4-c00b-4fdc-82a6-8efda3dee990 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.665598] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 683.665598] env[65758]: value = "task-4660072" [ 683.665598] env[65758]: _type = "Task" [ 683.665598] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.675549] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660072, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.701760] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Acquiring lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.702108] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.702390] env[65758]: INFO nova.compute.manager [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Attaching volume 34e992c3-1078-4a20-bf5f-0781ef54676c to /dev/sdb [ 683.771128] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080cc69c-1513-4928-b8f4-78c2042f5a80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.773585] env[65758]: DEBUG nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 683.780043] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ffb83e-a08a-4c86-9982-5cd815a4db83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.800872] env[65758]: DEBUG nova.virt.block_device [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Updating existing volume attachment record: f460b55e-7ea5-49c3-8286-8317594cb424 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 683.807714] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5ab630-31b6-4b57-bd8d-5704f25cdef5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.817127] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf37bc90-fc60-47a5-a61e-2adab510b869 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.857063] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5033a7-6e70-4184-8efb-d82b9ad12047 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.876351] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392cd858-b320-44fe-ab01-b1ea27b9b30d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.898920] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660070, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075181} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.899731] env[65758]: DEBUG nova.compute.provider_tree [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.902128] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 683.903347] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930019ca-7a55-406c-ac19-dc4a9d563701 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.918097] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52515f55-3a0c-1d78-34ff-d7c5eded4b63, 'name': SearchDatastore_Task, 'duration_secs': 0.011378} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.929861] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.930133] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.930422] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.930695] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.930997] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Creating directory 
with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 683.941450] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 9e007d55-0a5c-4469-a546-9b18e188bea0/9e007d55-0a5c-4469-a546-9b18e188bea0.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.941566] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cf4a885-b640-43cc-8e07-3749368d428f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.944424] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ffe9b88-b5cd-45cd-8b71-fc981d78e0b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.976304] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock "refresh_cache-148eddf4-4c01-47bc-be81-451ca57e7347" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.976647] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Instance network_info: |[{"id": "533485bf-4e5b-467a-a80c-4e9867e7efbe", "address": "fa:16:3e:28:16:66", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap533485bf-4e", "ovs_interfaceid": "533485bf-4e5b-467a-a80c-4e9867e7efbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 683.977444] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:16:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '533485bf-4e5b-467a-a80c-4e9867e7efbe', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.989992] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 683.994853] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 683.995271] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 683.995271] env[65758]: value = "task-4660073" [ 683.995271] env[65758]: _type = "Task" [ 683.995271] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.995539] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 683.996904] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.000728] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8b37c38-4d90-4f41-a3ad-8a9d770fe7d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.017513] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a02f3f15-1953-47b4-8cf3-f09742412df9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.020725] env[65758]: DEBUG oslo_vmware.api [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660071, 'name': PowerOnVM_Task, 'duration_secs': 0.553229} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.022274] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.026270] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 684.026610] env[65758]: INFO nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Took 17.50 seconds to spawn the instance on the hypervisor. [ 684.026881] env[65758]: DEBUG nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 684.029435] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ed5091-4ec6-4cc7-b3a2-6a4d2ca29988 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.033981] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 684.033981] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52790988-157c-06fd-68f0-c76a27bda3a8" [ 684.033981] env[65758]: _type = "Task" [ 684.033981] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.046039] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660073, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.046432] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 684.046432] env[65758]: value = "task-4660074" [ 684.046432] env[65758]: _type = "Task" [ 684.046432] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.060343] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquiring lock "refresh_cache-b6b673e9-0ae1-4c7c-be53-e83641063cf8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.060583] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquired lock "refresh_cache-b6b673e9-0ae1-4c7c-be53-e83641063cf8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.060794] env[65758]: DEBUG nova.network.neutron [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 684.063086] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52790988-157c-06fd-68f0-c76a27bda3a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.064356] env[65758]: DEBUG nova.network.neutron [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Successfully created port: 9f2a1cde-126e-4502-835b-0bb1647edbfe {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 684.080956] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660074, 'name': CreateVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.179226] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660072, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.350601] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "b7692c74-c919-45b4-991b-c06a530ff9ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.350601] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7692c74-c919-45b4-991b-c06a530ff9ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.396160] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "de8f3600-b25f-4396-af37-ea703587979c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.396502] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "de8f3600-b25f-4396-af37-ea703587979c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.402868] env[65758]: DEBUG nova.scheduler.client.report [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 684.530238] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660073, 'name': ReconfigVM_Task, 'duration_secs': 0.422473} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.530625] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 9e007d55-0a5c-4469-a546-9b18e188bea0/9e007d55-0a5c-4469-a546-9b18e188bea0.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.531463] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03086998-c35a-4d96-a113-3f8720f74700 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.540975] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 684.540975] env[65758]: value = "task-4660079" [ 684.540975] env[65758]: _type = "Task" [ 684.540975] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.556438] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52790988-157c-06fd-68f0-c76a27bda3a8, 'name': SearchDatastore_Task, 'duration_secs': 0.06345} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.560733] env[65758]: INFO nova.compute.manager [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Took 46.88 seconds to build instance. 
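Editor's note: the ReconfigVM_Task, Rename_Task, CreateVM_Task and CopyVirtualDisk_Task entries above all follow the same oslo.vmware pattern: a vSphere "*_Task" method call returns a Task managed-object reference, and the session's wait_for_task() polls it until completion, which is what produces the repeated "Waiting for the task" / "progress is N%" / "completed successfully" lines (api.py:397/434/444). A minimal sketch of that pattern follows; the vCenter endpoint, credentials and the choice of PowerOnVM_Task are placeholders, and the constructor keyword names are recalled from the oslo.vmware API rather than taken from this run, so they should be checked against the installed release.

# Sketch of the task-polling pattern behind the wait_for_task/_poll_task
# entries above. Endpoint, credentials and the VM chosen are illustrative
# placeholders, not values from this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    host='vcenter.example.org',        # placeholder vCenter endpoint
    server_username='administrator',   # placeholder credentials
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5,            # seconds between task polls
)

# Grab the first VirtualMachine moref just to have something to act on.
retrieve_result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                     'VirtualMachine', 1)
vm_ref = retrieve_result.objects[0].obj

# Any *_Task SOAP method returns a Task moref; wait_for_task() drives the
# polling loop that logs "progress is N%" until the task finishes.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the hypervisor completes the task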
[ 684.566333] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6f03349-b3a4-4e50-ba2e-683e230c3f43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.579027] env[65758]: WARNING openstack [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 684.579323] env[65758]: WARNING openstack [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 684.596329] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660079, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.609712] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 684.609712] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521d56b9-91c0-4a7a-6ebc-ccc02caa48e0" [ 684.609712] env[65758]: _type = "Task" [ 684.609712] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.610055] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660074, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.624360] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521d56b9-91c0-4a7a-6ebc-ccc02caa48e0, 'name': SearchDatastore_Task, 'duration_secs': 0.014168} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.625156] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.625612] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 03073968-e679-4ce5-9f84-c4765217b308/03073968-e679-4ce5-9f84-c4765217b308.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.625993] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73650ba5-15f8-41c1-a44f-6250baeb9823 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.635942] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 684.635942] env[65758]: value = "task-4660080" [ 684.635942] env[65758]: _type = "Task" [ 684.635942] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.650316] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660080, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.680661] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660072, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538418} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.681065] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 492d1063-8eaf-4207-8d65-341fbc0b6c39/492d1063-8eaf-4207-8d65-341fbc0b6c39.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 684.681387] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 684.681848] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-280aaf30-5f3e-4c29-890d-0dc1de898061 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.692330] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 684.692330] env[65758]: value = "task-4660081" [ 684.692330] env[65758]: _type = "Task" [ 684.692330] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.708838] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660081, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.785310] env[65758]: DEBUG nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 684.812428] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 684.812428] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.812428] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 684.812676] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.812676] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 684.812864] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 684.813232] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 684.813232] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 684.814000] env[65758]: DEBUG nova.virt.hardware [None 
req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 684.814000] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 684.814000] env[65758]: DEBUG nova.virt.hardware [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 684.814787] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf8800c-ce3d-4664-8bc4-64ed18cf9b0c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.824626] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5d585b-51ca-4779-bf4f-41989ff3596a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.854617] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 684.908993] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.913208] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.411s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.913676] env[65758]: DEBUG nova.objects.instance [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lazy-loading 'resources' on Instance uuid 0ac196fa-d88c-45a8-999e-8b5216912041 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 684.945817] env[65758]: INFO nova.scheduler.client.report [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Deleted allocations for instance 28ccc013-962d-4607-83a2-5fcd480c27b2 [ 685.052236] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: 
{'id': task-4660079, 'name': Rename_Task, 'duration_secs': 0.183865} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.052439] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 685.052704] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed6c8dbc-6183-4690-98a4-d74c7d8c701a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.060721] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 685.060721] env[65758]: value = "task-4660082" [ 685.060721] env[65758]: _type = "Task" [ 685.060721] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.077091] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660082, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.077091] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660074, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.077179] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77998842-9367-49c2-b401-6f8b44b345eb tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "8eb65797-072b-4a7e-853d-26c0adc51bb2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.405s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.147788] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660080, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.203549] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660081, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069761} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.203952] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 685.204697] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8210809e-3655-48a5-ac85-e5522ded5e87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.228522] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 492d1063-8eaf-4207-8d65-341fbc0b6c39/492d1063-8eaf-4207-8d65-341fbc0b6c39.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 685.229320] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34debf16-0af9-45d2-848c-39bef553a7db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.253375] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 685.253375] env[65758]: value = "task-4660083" [ 685.253375] env[65758]: _type = "Task" [ 685.253375] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.263094] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660083, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.287845] env[65758]: DEBUG nova.network.neutron [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 685.385109] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.460668] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41f2c5db-d935-47b7-b5f7-e17505903161 tempest-ServerDiagnosticsTest-1410205495 tempest-ServerDiagnosticsTest-1410205495-project-member] Lock "28ccc013-962d-4607-83a2-5fcd480c27b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.847s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.586309] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 685.589531] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660082, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.589803] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660074, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.652770] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660080, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.777222] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660083, 'name': ReconfigVM_Task, 'duration_secs': 0.427701} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.777222] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 492d1063-8eaf-4207-8d65-341fbc0b6c39/492d1063-8eaf-4207-8d65-341fbc0b6c39.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 685.777222] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41141ec3-6487-4c45-b8ef-f1e1bb7d0eeb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.786558] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 685.786558] env[65758]: value = "task-4660084" [ 685.786558] env[65758]: _type = "Task" [ 685.786558] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.798130] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660084, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.851890] env[65758]: DEBUG nova.network.neutron [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Successfully updated port: 9f2a1cde-126e-4502-835b-0bb1647edbfe {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 685.997700] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c3420a-f149-4fd2-90a5-e94f7d815039 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.007553] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d8cab5-ee6e-46ad-94f9-b1fb068e65aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.048694] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b93aa5a-8441-43c5-bb5c-350312d6a66f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.057969] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cad632-83b5-4a60-a3d3-b2ae57394102 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.081774] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660074, 'name': CreateVM_Task, 'duration_secs': 1.629019} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.082763] env[65758]: DEBUG nova.compute.provider_tree [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.084989] env[65758]: WARNING neutronclient.v2_0.client [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 686.085627] env[65758]: WARNING openstack [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 686.085977] env[65758]: WARNING openstack [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 686.094271] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.098835] env[65758]: DEBUG nova.scheduler.client.report [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 686.104695] env[65758]: WARNING neutronclient.v2_0.client [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
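Editor's note: the "Acquiring lock ... by ..." / "acquired ... :: waited Ns" / "released ... :: held Ns" triplets running through this section are emitted by oslo.concurrency's lockutils: the decorator form shows up at lockutils.py:405/410/424 and the context-manager form at lockutils.py:313/316/334 in the entries above (for example around the per-instance "refresh_cache-<uuid>" and datastore image-cache locks). A minimal sketch of both forms follows; the lock names and function bodies are illustrative only, not Nova's actual implementation.

# Sketch of the oslo.concurrency locking pattern that produces the
# "Acquiring lock / acquired (waited Ns) / released (held Ns)" DEBUG lines.
# Lock names and function bodies are illustrative placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('e60efbcd-1c4e-40a1-8bc1-893daa511073')
def do_attach_volume():
    # Only one thread/greenthread holding this lock name executes the body
    # at a time; the waited/held durations in the log measure entry and exit.
    pass

def build_network_info(instance_uuid):
    # Context-manager form, the same shape as the per-instance
    # "refresh_cache-<uuid>" locks seen above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass

do_attach_volume()
build_network_info('b6b673e9-0ae1-4c7c-be53-e83641063cf8')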
[ 686.105058] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.105207] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.105575] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 686.106413] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.106604] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.106766] env[65758]: DEBUG nova.compute.manager [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Going to confirm migration 1 {{(pid=65758) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 686.108050] env[65758]: DEBUG oslo_vmware.api [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660082, 'name': PowerOnVM_Task, 'duration_secs': 0.584484} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.110552] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87dadb0f-d8f6-4b3d-9924-746e8bbc1ac1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.112385] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 686.112614] env[65758]: INFO nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Took 16.89 seconds to spawn the instance on the hypervisor. [ 686.112746] env[65758]: DEBUG nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 686.115099] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df50124-46aa-4446-a23e-a6317661e854 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.121155] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 686.121155] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bc6908-9770-3d88-b021-ea13caa66edc" [ 686.121155] env[65758]: _type = "Task" [ 686.121155] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.129905] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.138022] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bc6908-9770-3d88-b021-ea13caa66edc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.148145] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660080, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.489485} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.148432] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 03073968-e679-4ce5-9f84-c4765217b308/03073968-e679-4ce5-9f84-c4765217b308.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 686.148679] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 686.149182] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b32b7a2a-ca4b-4c8f-ba98-f829d76f02e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.157323] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 686.157323] env[65758]: value = "task-4660085" [ 686.157323] env[65758]: _type = "Task" [ 686.157323] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.169451] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660085, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.297777] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660084, 'name': Rename_Task, 'duration_secs': 0.218057} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.297777] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 686.298602] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c4acafe-3a0c-477c-b145-90b32eb7d233 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.310755] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 686.310755] env[65758]: value = "task-4660087" [ 686.310755] env[65758]: _type = "Task" [ 686.310755] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.311035] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquiring lock "a2010738-759b-480a-8360-2639788056b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.311306] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "a2010738-759b-480a-8360-2639788056b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.311978] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquiring lock "a2010738-759b-480a-8360-2639788056b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.311978] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "a2010738-759b-480a-8360-2639788056b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.311978] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "a2010738-759b-480a-8360-2639788056b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.314274] env[65758]: INFO nova.compute.manager [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Terminating instance [ 686.329377] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660087, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.335588] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Received event network-vif-deleted-fc7dd128-390d-4176-b4ab-960fb037bc95 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 686.335789] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Received event network-vif-plugged-fc47a856-bb57-45b8-986b-bc9bcf87abe6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 686.335967] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquiring lock "9e007d55-0a5c-4469-a546-9b18e188bea0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.336194] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Lock "9e007d55-0a5c-4469-a546-9b18e188bea0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.336360] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Lock "9e007d55-0a5c-4469-a546-9b18e188bea0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.336521] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] No waiting events found dispatching network-vif-plugged-fc47a856-bb57-45b8-986b-bc9bcf87abe6 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 686.336680] env[65758]: WARNING nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Received unexpected event network-vif-plugged-fc47a856-bb57-45b8-986b-bc9bcf87abe6 for instance with vm_state active and task_state None. [ 686.336875] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Received event network-changed-fc47a856-bb57-45b8-986b-bc9bcf87abe6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 686.336984] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Refreshing instance network info cache due to event network-changed-fc47a856-bb57-45b8-986b-bc9bcf87abe6. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 686.337159] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquiring lock "refresh_cache-9e007d55-0a5c-4469-a546-9b18e188bea0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.337283] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquired lock "refresh_cache-9e007d55-0a5c-4469-a546-9b18e188bea0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.337439] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Refreshing network info cache for port fc47a856-bb57-45b8-986b-bc9bcf87abe6 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 686.357642] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquiring lock "refresh_cache-2bd02c6d-a139-4259-8b28-eed5efc5d094" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.357753] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquired lock "refresh_cache-2bd02c6d-a139-4259-8b28-eed5efc5d094" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.358162] env[65758]: DEBUG nova.network.neutron [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 686.613497] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.618546] env[65758]: WARNING neutronclient.v2_0.client [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 686.620661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.804s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.620890] env[65758]: DEBUG nova.objects.instance [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 686.644022] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bc6908-9770-3d88-b021-ea13caa66edc, 'name': SearchDatastore_Task, 'duration_secs': 0.016756} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.646154] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.646154] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.646334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.646504] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.646651] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.648993] env[65758]: INFO nova.compute.manager [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 
tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Took 44.18 seconds to build instance. [ 686.651437] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d31d7408-e612-4c09-9520-5839d932b201 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.655636] env[65758]: INFO nova.scheduler.client.report [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted allocations for instance 0ac196fa-d88c-45a8-999e-8b5216912041 [ 686.666090] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.666286] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.667431] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9de07c1-0249-4f22-93bb-1aa9542a698c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.674126] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068086} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.675659] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.676170] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 686.676170] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ab56f9-3551-c841-acb4-1878f697736b" [ 686.676170] env[65758]: _type = "Task" [ 686.676170] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.676885] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3460d4db-025f-4e4f-8762-c753f61ab339 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.692374] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ab56f9-3551-c841-acb4-1878f697736b, 'name': SearchDatastore_Task, 'duration_secs': 0.011762} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.712143] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 03073968-e679-4ce5-9f84-c4765217b308/03073968-e679-4ce5-9f84-c4765217b308.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.712455] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71780d52-d760-4907-874f-19c700d43d75 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.715315] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae610359-8ce2-4ce1-878b-52b915cd8fd1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.735486] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 686.735486] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520d7afc-d07b-35f3-f406-b7e01671ce13" [ 686.735486] env[65758]: _type = "Task" [ 686.735486] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.737340] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 686.737340] env[65758]: value = "task-4660088" [ 686.737340] env[65758]: _type = "Task" [ 686.737340] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.744714] env[65758]: DEBUG nova.network.neutron [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Updating instance_info_cache with network_info: [{"id": "3e23d45e-b849-47dd-9649-500080939b87", "address": "fa:16:3e:2a:7d:15", "network": {"id": "e9c8c55e-58ed-4ac8-bd45-2e7dc91beb47", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1571509614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "df8b333d08c54634b617dd8284143beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e23d45e-b8", "ovs_interfaceid": "3e23d45e-b849-47dd-9649-500080939b87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 686.761200] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660088, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.787978] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "24379189-b10a-4ef6-a3f6-b7bb43029dab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.788147] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "24379189-b10a-4ef6-a3f6-b7bb43029dab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.788616] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "24379189-b10a-4ef6-a3f6-b7bb43029dab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.788616] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "24379189-b10a-4ef6-a3f6-b7bb43029dab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.788947] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "24379189-b10a-4ef6-a3f6-b7bb43029dab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.791975] env[65758]: INFO nova.compute.manager [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Terminating instance [ 686.819471] env[65758]: DEBUG nova.compute.manager [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 686.821027] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 686.821027] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cff508-6959-499c-af3a-30ecc12b5f49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.834420] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Received event network-vif-plugged-13ded2e1-9fb6-4eed-b82c-82509e35b8fb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 686.834420] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Acquiring lock "8eb65797-072b-4a7e-853d-26c0adc51bb2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.834595] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Lock "8eb65797-072b-4a7e-853d-26c0adc51bb2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.834683] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Lock "8eb65797-072b-4a7e-853d-26c0adc51bb2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.834823] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] No waiting events found dispatching network-vif-plugged-13ded2e1-9fb6-4eed-b82c-82509e35b8fb {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 686.834998] env[65758]: WARNING nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Received unexpected event network-vif-plugged-13ded2e1-9fb6-4eed-b82c-82509e35b8fb for instance with vm_state active and task_state None. 
[ 686.835202] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Received event network-changed-13ded2e1-9fb6-4eed-b82c-82509e35b8fb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 686.835375] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Refreshing instance network info cache due to event network-changed-13ded2e1-9fb6-4eed-b82c-82509e35b8fb. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 686.835676] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Acquiring lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.835866] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Acquired lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.836066] env[65758]: DEBUG nova.network.neutron [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Refreshing network info cache for port 13ded2e1-9fb6-4eed-b82c-82509e35b8fb {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 686.840361] env[65758]: WARNING neutronclient.v2_0.client [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 686.841366] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 686.841814] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 686.861409] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 686.864219] env[65758]: WARNING openstack [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 686.864219] env[65758]: WARNING openstack [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 686.871120] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660087, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.872030] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48a15649-3ef6-40f4-bdc9-20698bf830fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.881239] env[65758]: DEBUG oslo_vmware.api [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 686.881239] env[65758]: value = "task-4660089" [ 686.881239] env[65758]: _type = "Task" [ 686.881239] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.893760] env[65758]: DEBUG oslo_vmware.api [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660089, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.919202] env[65758]: WARNING neutronclient.v2_0.client [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 686.919202] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.919202] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.919202] env[65758]: DEBUG nova.network.neutron [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 686.919202] env[65758]: DEBUG nova.objects.instance [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lazy-loading 'info_cache' on Instance uuid 83fa942b-a195-4bcb-9ed5-5bb6764220a4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 687.051624] env[65758]: DEBUG nova.network.neutron [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 687.155197] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24fb300c-3a6e-415a-91d3-aa4cfbeea6b4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "9e007d55-0a5c-4469-a546-9b18e188bea0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.701s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.200402] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9bf6bc94-92b8-4223-8a11-73a17145aad6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "0ac196fa-d88c-45a8-999e-8b5216912041" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.384s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.250456] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Releasing lock "refresh_cache-b6b673e9-0ae1-4c7c-be53-e83641063cf8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.250995] env[65758]: DEBUG nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Instance network_info: |[{"id": "3e23d45e-b849-47dd-9649-500080939b87", "address": "fa:16:3e:2a:7d:15", "network": {"id": "e9c8c55e-58ed-4ac8-bd45-2e7dc91beb47", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1571509614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "df8b333d08c54634b617dd8284143beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e23d45e-b8", "ovs_interfaceid": "3e23d45e-b849-47dd-9649-500080939b87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 687.260381] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:7d:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e23d45e-b849-47dd-9649-500080939b87', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.269176] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Creating folder: Project (df8b333d08c54634b617dd8284143beb). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.269753] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520d7afc-d07b-35f3-f406-b7e01671ce13, 'name': SearchDatastore_Task, 'duration_secs': 0.047076} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.270356] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660088, 'name': ReconfigVM_Task, 'duration_secs': 0.381991} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.270795] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66e51384-0924-420c-a55c-848dee5d30da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.273851] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.274255] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 148eddf4-4c01-47bc-be81-451ca57e7347/148eddf4-4c01-47bc-be81-451ca57e7347.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.275369] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 03073968-e679-4ce5-9f84-c4765217b308/03073968-e679-4ce5-9f84-c4765217b308.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.275369] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cea8b7b-6512-40e7-8e7c-1594dfdc2760 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.277719] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ed0cc4f-efa7-463e-af60-0fe396230933 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.288180] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 687.288180] env[65758]: value = "task-4660090" [ 687.288180] env[65758]: _type = "Task" [ 687.288180] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.290812] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 687.290812] env[65758]: value = "task-4660091" [ 687.290812] env[65758]: _type = "Task" [ 687.290812] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.298266] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "refresh_cache-24379189-b10a-4ef6-a3f6-b7bb43029dab" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.298266] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquired lock "refresh_cache-24379189-b10a-4ef6-a3f6-b7bb43029dab" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.298634] env[65758]: DEBUG nova.network.neutron [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 687.314101] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660090, 'name': Rename_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.325335] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660091, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.332656] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Created folder: Project (df8b333d08c54634b617dd8284143beb) in parent group-v909763. [ 687.334302] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Creating folder: Instances. Parent ref: group-v909845. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.334302] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78619542-cdf4-4e50-ad89-8120229ad5c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.341902] env[65758]: WARNING neutronclient.v2_0.client [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 687.342640] env[65758]: WARNING openstack [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 687.343648] env[65758]: WARNING openstack [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 687.357301] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660087, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.374843] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Created folder: Instances in parent group-v909845. [ 687.374843] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 687.374843] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.374843] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44aa539f-cd0b-402e-ba87-802a254c10d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.402632] env[65758]: DEBUG oslo_vmware.api [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660089, 'name': PowerOffVM_Task, 'duration_secs': 0.330967} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.405232] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 687.405584] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 687.405938] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.405938] env[65758]: value = "task-4660094" [ 687.405938] env[65758]: _type = "Task" [ 687.405938] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.406314] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65971062-e9af-4901-880b-83e7308f249a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.425275] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660094, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.505132] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 687.505366] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 687.509088] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Deleting the datastore file [datastore2] a2010738-759b-480a-8360-2639788056b1 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 687.509088] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-027a9a8c-ea81-473d-9275-a200663f61f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.516484] env[65758]: DEBUG oslo_vmware.api [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for the task: (returnval){ [ 687.516484] env[65758]: value = "task-4660096" [ 687.516484] env[65758]: _type = "Task" [ 687.516484] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.527294] env[65758]: DEBUG oslo_vmware.api [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660096, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.634690] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f2c0adff-6b47-4cf6-a6ef-ee4db606eaf0 tempest-ServersAdmin275Test-1976369394 tempest-ServersAdmin275Test-1976369394-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.635431] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.734s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.638392] env[65758]: DEBUG nova.objects.instance [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lazy-loading 'resources' on Instance uuid 83b637d8-b9fa-4159-b879-c1d737871539 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 687.808934] env[65758]: WARNING neutronclient.v2_0.client [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 687.808934] env[65758]: WARNING openstack [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 687.809347] env[65758]: WARNING openstack [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 687.835051] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660090, 'name': Rename_Task, 'duration_secs': 0.186504} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.835620] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660091, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.840339] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.840761] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12dc1b35-d77c-4d1c-8d0a-8298f3991d25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.853743] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660087, 'name': PowerOnVM_Task, 'duration_secs': 1.412582} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.856417] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 687.856575] env[65758]: INFO nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Took 14.60 seconds to spawn the instance on the hypervisor. [ 687.857123] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 687.857474] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 687.857474] env[65758]: value = "task-4660097" [ 687.857474] env[65758]: _type = "Task" [ 687.857474] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.858277] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472cbe94-de06-411f-8730-16229f570b6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.876939] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660097, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.920360] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660094, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.926662] env[65758]: WARNING neutronclient.v2_0.client [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 687.927385] env[65758]: WARNING openstack [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 687.927856] env[65758]: WARNING openstack [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 687.996038] env[65758]: WARNING neutronclient.v2_0.client [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 687.997103] env[65758]: WARNING openstack [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 687.997671] env[65758]: WARNING openstack [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.031313] env[65758]: DEBUG oslo_vmware.api [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660096, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.072976] env[65758]: DEBUG nova.network.neutron [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 688.101435] env[65758]: WARNING neutronclient.v2_0.client [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 688.102535] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 688.103108] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.315560] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660091, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.759812} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.315560] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 148eddf4-4c01-47bc-be81-451ca57e7347/148eddf4-4c01-47bc-be81-451ca57e7347.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.315560] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.319041] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14716360-23dd-4e6a-a6bf-e0fb388c2c52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.329363] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 688.329363] env[65758]: value = "task-4660098" [ 688.329363] env[65758]: _type = "Task" [ 688.329363] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.346362] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660098, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.377843] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660097, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.399321] env[65758]: INFO nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Took 38.67 seconds to build instance. [ 688.427796] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660094, 'name': CreateVM_Task, 'duration_secs': 0.560136} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.431323] env[65758]: DEBUG nova.network.neutron [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Updating instance_info_cache with network_info: [{"id": "9f2a1cde-126e-4502-835b-0bb1647edbfe", "address": "fa:16:3e:07:42:48", "network": {"id": "0179f052-13e6-4824-928b-95f34f098cfe", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2018208707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c18712cd526b4e25b07140cb554b04d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd998416-f3d6-4a62-b828-5011063ce76a", "external-id": "nsx-vlan-transportzone-57", "segmentation_id": 57, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2a1cde-12", "ovs_interfaceid": "9f2a1cde-126e-4502-835b-0bb1647edbfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 688.432666] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 688.433541] env[65758]: WARNING neutronclient.v2_0.client [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 688.433935] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.434095] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.435838] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 688.438770] env[65758]: DEBUG nova.network.neutron [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 688.440170] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30428c0e-af3f-49ae-bdc1-ec95997cbca1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.452229] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 688.452229] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520a4a8a-bfe9-4c1b-0b60-afc9d6fd9a48" [ 688.452229] env[65758]: _type = "Task" [ 688.452229] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.464276] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520a4a8a-bfe9-4c1b-0b60-afc9d6fd9a48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.501978] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Updated VIF entry in instance network info cache for port fc47a856-bb57-45b8-986b-bc9bcf87abe6. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 688.502381] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Updating instance_info_cache with network_info: [{"id": "fc47a856-bb57-45b8-986b-bc9bcf87abe6", "address": "fa:16:3e:cc:22:d7", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc47a856-bb", "ovs_interfaceid": "fc47a856-bb57-45b8-986b-bc9bcf87abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 688.538358] env[65758]: DEBUG oslo_vmware.api [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Task: {'id': task-4660096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.532246} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.539650] env[65758]: WARNING neutronclient.v2_0.client [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 688.540310] env[65758]: WARNING openstack [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 688.540657] env[65758]: WARNING openstack [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.548301] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 688.548585] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 688.548688] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.548833] env[65758]: INFO nova.compute.manager [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] [instance: a2010738-759b-480a-8360-2639788056b1] Took 1.73 seconds to destroy the instance on the hypervisor. [ 688.549137] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 688.552514] env[65758]: DEBUG nova.compute.manager [-] [instance: a2010738-759b-480a-8360-2639788056b1] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 688.552645] env[65758]: DEBUG nova.network.neutron [-] [instance: a2010738-759b-480a-8360-2639788056b1] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 688.552877] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 688.553496] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 688.553675] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.597511] env[65758]: DEBUG nova.compute.manager [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Received event network-changed {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 688.597511] env[65758]: DEBUG nova.compute.manager [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Refreshing instance network info cache due to event network-changed. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 688.597511] env[65758]: DEBUG oslo_concurrency.lockutils [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] Acquiring lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.685662] env[65758]: WARNING neutronclient.v2_0.client [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 688.686222] env[65758]: WARNING openstack [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 688.687420] env[65758]: WARNING openstack [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 688.701797] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 688.746111] env[65758]: DEBUG nova.network.neutron [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Updated VIF entry in instance network info cache for port 13ded2e1-9fb6-4eed-b82c-82509e35b8fb. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 688.746848] env[65758]: DEBUG nova.network.neutron [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Updating instance_info_cache with network_info: [{"id": "13ded2e1-9fb6-4eed-b82c-82509e35b8fb", "address": "fa:16:3e:5b:e3:fa", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13ded2e1-9f", "ovs_interfaceid": "13ded2e1-9fb6-4eed-b82c-82509e35b8fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 688.845670] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d564f3-3736-4aad-880e-38546ccb02ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.854204] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660098, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090211} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.857550] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 688.857550] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26410f21-a9de-4f97-bd60-82842620adde {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.861385] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788b410e-2360-425e-9345-73a6b156fde3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.887487] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 148eddf4-4c01-47bc-be81-451ca57e7347/148eddf4-4c01-47bc-be81-451ca57e7347.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 688.916298] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cccfe1a-8c66-41e7-9e18-796338b8b111 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.934146] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "492d1063-8eaf-4207-8d65-341fbc0b6c39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.239s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.936585] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 688.936812] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909844', 'volume_id': '34e992c3-1078-4a20-bf5f-0781ef54676c', 'name': 'volume-34e992c3-1078-4a20-bf5f-0781ef54676c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e60efbcd-1c4e-40a1-8bc1-893daa511073', 'attached_at': '', 'detached_at': '', 'volume_id': '34e992c3-1078-4a20-bf5f-0781ef54676c', 'serial': '34e992c3-1078-4a20-bf5f-0781ef54676c'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 688.940781] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9253721e-1948-4e8a-8be9-5661a58fa278 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.945204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Releasing lock "refresh_cache-2bd02c6d-a139-4259-8b28-eed5efc5d094" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.945204] env[65758]: DEBUG nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Instance network_info: |[{"id": "9f2a1cde-126e-4502-835b-0bb1647edbfe", "address": "fa:16:3e:07:42:48", "network": {"id": "0179f052-13e6-4824-928b-95f34f098cfe", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2018208707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c18712cd526b4e25b07140cb554b04d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd998416-f3d6-4a62-b828-5011063ce76a", "external-id": "nsx-vlan-transportzone-57", "segmentation_id": 57, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2a1cde-12", "ovs_interfaceid": "9f2a1cde-126e-4502-835b-0bb1647edbfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 688.945204] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec728c65-6b29-4fbe-adc8-4b944f8b7f47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.947487] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 
tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660097, 'name': PowerOnVM_Task, 'duration_secs': 0.753049} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.948300] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Releasing lock "refresh_cache-24379189-b10a-4ef6-a3f6-b7bb43029dab" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.948672] env[65758]: DEBUG nova.compute.manager [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 688.949114] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.949287] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:42:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd998416-f3d6-4a62-b828-5011063ce76a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f2a1cde-126e-4502-835b-0bb1647edbfe', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.956833] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Creating folder: Project (c18712cd526b4e25b07140cb554b04d0). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 688.957179] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 688.957381] env[65758]: INFO nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Took 13.09 seconds to spawn the instance on the hypervisor. 
[ 688.957551] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 688.959578] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db040086-c984-44ae-b238-88cc545a6473 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.963369] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa37e4b3-7500-43aa-8f1a-1033f7c9242c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.965703] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34be986-95a8-4184-abe2-647d5aef4fef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.968613] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 688.968613] env[65758]: value = "task-4660099" [ 688.968613] env[65758]: _type = "Task" [ 688.968613] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.992345] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39feb61-e568-41f5-b350-32c147daf914 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.997739] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0567ae5-7d2d-43f5-815c-8e8f2d2f828d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.008916] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 689.009298] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Created folder: Project (c18712cd526b4e25b07140cb554b04d0) in parent group-v909763. [ 689.009502] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Creating folder: Instances. Parent ref: group-v909848. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.010263] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Releasing lock "refresh_cache-9e007d55-0a5c-4469-a546-9b18e188bea0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.010263] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Received event network-vif-plugged-cd42e1b4-a8b0-4277-8fe9-1ea960c938ce {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 689.010538] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquiring lock "03073968-e679-4ce5-9f84-c4765217b308-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.010885] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Lock "03073968-e679-4ce5-9f84-c4765217b308-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.011140] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Lock "03073968-e679-4ce5-9f84-c4765217b308-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.011345] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] No waiting events found dispatching network-vif-plugged-cd42e1b4-a8b0-4277-8fe9-1ea960c938ce {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 689.011838] env[65758]: WARNING nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Received unexpected event network-vif-plugged-cd42e1b4-a8b0-4277-8fe9-1ea960c938ce for instance with vm_state building and task_state spawning. [ 689.011838] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Received event network-changed-cd42e1b4-a8b0-4277-8fe9-1ea960c938ce {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 689.011838] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Refreshing instance network info cache due to event network-changed-cd42e1b4-a8b0-4277-8fe9-1ea960c938ce. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 689.011986] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquiring lock "refresh_cache-03073968-e679-4ce5-9f84-c4765217b308" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.012181] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquired lock "refresh_cache-03073968-e679-4ce5-9f84-c4765217b308" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.012292] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Refreshing network info cache for port cd42e1b4-a8b0-4277-8fe9-1ea960c938ce {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 689.017862] env[65758]: DEBUG nova.network.neutron [-] [instance: a2010738-759b-480a-8360-2639788056b1] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 689.024952] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2b1a268-5592-49c6-b4be-2f62cfc6ce1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.028145] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af829ae0-3232-41f9-a39e-2bdd7e6db2e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.032819] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520a4a8a-bfe9-4c1b-0b60-afc9d6fd9a48, 'name': SearchDatastore_Task, 'duration_secs': 0.014837} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.039906] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.040324] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 689.040549] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.040694] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.040870] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 689.042049] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660099, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.051752] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c28bc553-e7b1-43c9-a004-78e31b35196c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.055445] env[65758]: DEBUG nova.compute.provider_tree [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.077101] env[65758]: DEBUG nova.network.neutron [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance_info_cache with network_info: [{"id": "4741e651-cd1e-4ea0-b378-213efedb59d4", "address": "fa:16:3e:9f:a7:58", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4741e651-cd", "ovs_interfaceid": "4741e651-cd1e-4ea0-b378-213efedb59d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 689.079693] env[65758]: DEBUG oslo_vmware.api [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 689.079693] env[65758]: value = "task-4660101" [ 689.079693] env[65758]: _type = "Task" [ 689.079693] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.087108] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] volume-34e992c3-1078-4a20-bf5f-0781ef54676c/volume-34e992c3-1078-4a20-bf5f-0781ef54676c.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.089673] env[65758]: DEBUG nova.scheduler.client.report [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 689.094641] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8ed36a4-3aaf-46b0-aec9-8a2f95efe0d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.109094] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Created folder: Instances in parent group-v909848. [ 689.109366] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 689.110852] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.475s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.117811] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.118190] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.811s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.119783] env[65758]: INFO nova.compute.claims [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 689.122944] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 689.123034] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 689.127199] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f83ffc5-9402-47a7-9bd8-9b637590023f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.139902] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45eacbb0-c38b-48f9-b16e-d8b734084761 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.144501] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Waiting for the task: (returnval){ [ 689.144501] env[65758]: value = "task-4660103" [ 689.144501] env[65758]: _type = "Task" [ 689.144501] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.156421] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 689.156421] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ca95db-a3bb-5f16-1975-c52dbdd6d0f8" [ 689.156421] env[65758]: _type = "Task" [ 689.156421] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.156642] env[65758]: DEBUG oslo_vmware.api [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4660101, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.156828] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.156828] env[65758]: value = "task-4660104" [ 689.156828] env[65758]: _type = "Task" [ 689.156828] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.169756] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660103, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.171102] env[65758]: INFO nova.scheduler.client.report [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Deleted allocations for instance 83b637d8-b9fa-4159-b879-c1d737871539 [ 689.186711] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ca95db-a3bb-5f16-1975-c52dbdd6d0f8, 'name': SearchDatastore_Task, 'duration_secs': 0.014434} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.186711] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660104, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.186711] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c84cf19-0441-4757-90b3-4ae28298e3a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.198987] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 689.198987] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e7668e-6baf-4a81-815f-a670be3ba183" [ 689.198987] env[65758]: _type = "Task" [ 689.198987] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.213213] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "483765b5-c63c-4aac-9082-519bbc4e6eb5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.213544] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.213799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.214046] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.214229] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.216572] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e7668e-6baf-4a81-815f-a670be3ba183, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.218095] env[65758]: INFO nova.compute.manager [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Terminating instance [ 689.253093] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Releasing lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.253093] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Received event network-vif-deleted-f909dddc-4c03-4424-acfc-d0739864ec6e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 689.253093] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Received event network-vif-plugged-67c82890-e746-45ff-9f1b-e905834b0064 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 689.253093] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Acquiring lock "492d1063-8eaf-4207-8d65-341fbc0b6c39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.253093] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Lock "492d1063-8eaf-4207-8d65-341fbc0b6c39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.253093] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Lock "492d1063-8eaf-4207-8d65-341fbc0b6c39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.253093] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] No waiting events found dispatching network-vif-plugged-67c82890-e746-45ff-9f1b-e905834b0064 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 689.253590] env[65758]: WARNING nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Received unexpected event network-vif-plugged-67c82890-e746-45ff-9f1b-e905834b0064 for instance with vm_state building and task_state spawning. 
[ 689.255374] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Received event network-changed-67c82890-e746-45ff-9f1b-e905834b0064 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 689.255374] env[65758]: DEBUG nova.compute.manager [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Refreshing instance network info cache due to event network-changed-67c82890-e746-45ff-9f1b-e905834b0064. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 689.255374] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Acquiring lock "refresh_cache-492d1063-8eaf-4207-8d65-341fbc0b6c39" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.255802] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Acquired lock "refresh_cache-492d1063-8eaf-4207-8d65-341fbc0b6c39" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.255957] env[65758]: DEBUG nova.network.neutron [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Refreshing network info cache for port 67c82890-e746-45ff-9f1b-e905834b0064 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 689.258026] env[65758]: DEBUG oslo_concurrency.lockutils [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] Acquired lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.258026] env[65758]: DEBUG nova.network.neutron [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 689.498420] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660099, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.518835] env[65758]: WARNING neutronclient.v2_0.client [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 689.520393] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 689.520393] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 689.548100] env[65758]: INFO nova.compute.manager [-] [instance: a2010738-759b-480a-8360-2639788056b1] Took 0.99 seconds to deallocate network for instance. [ 689.552978] env[65758]: INFO nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Took 38.27 seconds to build instance. [ 689.594839] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-83fa942b-a195-4bcb-9ed5-5bb6764220a4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.596072] env[65758]: DEBUG nova.objects.instance [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lazy-loading 'migration_context' on Instance uuid 83fa942b-a195-4bcb-9ed5-5bb6764220a4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 689.615433] env[65758]: DEBUG oslo_vmware.api [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4660101, 'name': PowerOffVM_Task, 'duration_secs': 0.207668} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.616420] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 689.616420] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 689.616420] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8d8d1f3-0248-412c-bf6b-56bd2286dbf4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.652501] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 689.654794] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 689.654794] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Deleting the datastore file [datastore2] 24379189-b10a-4ef6-a3f6-b7bb43029dab {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 689.654794] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cd6c3f0-30dd-4654-bd9d-3ca3f8b4b491 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.663479] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660103, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.670740] env[65758]: DEBUG oslo_vmware.api [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for the task: (returnval){ [ 689.670740] env[65758]: value = "task-4660106" [ 689.670740] env[65758]: _type = "Task" [ 689.670740] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.682513] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660104, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.693200] env[65758]: DEBUG oslo_vmware.api [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4660106, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.693200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3b2a970-8c5e-435a-bff5-5c46b62f189d tempest-ServerAddressesNegativeTestJSON-763966829 tempest-ServerAddressesNegativeTestJSON-763966829-project-member] Lock "83b637d8-b9fa-4159-b879-c1d737871539" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.567s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.711090] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e7668e-6baf-4a81-815f-a670be3ba183, 'name': SearchDatastore_Task, 'duration_secs': 0.015548} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.711731] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.712225] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] b6b673e9-0ae1-4c7c-be53-e83641063cf8/b6b673e9-0ae1-4c7c-be53-e83641063cf8.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 689.712665] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2cda234c-f00a-427d-8e3a-86acb71af670 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.722029] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 689.722029] env[65758]: value = "task-4660107" [ 689.722029] env[65758]: _type = "Task" [ 689.722029] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.729433] env[65758]: DEBUG nova.compute.manager [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 689.730034] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 689.731413] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0e2bff-bf83-43ff-87c5-db9bc426e235 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.742551] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.754072] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 689.754072] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-985d6050-3206-4c8b-9e73-e05d44d281f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.767159] env[65758]: WARNING neutronclient.v2_0.client [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 689.767159] env[65758]: WARNING openstack [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 689.767159] env[65758]: WARNING openstack [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 689.775578] env[65758]: WARNING neutronclient.v2_0.client [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 689.777257] env[65758]: WARNING openstack [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 689.777664] env[65758]: WARNING openstack [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 689.789711] env[65758]: DEBUG oslo_vmware.api [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 689.789711] env[65758]: value = "task-4660108" [ 689.789711] env[65758]: _type = "Task" [ 689.789711] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.803161] env[65758]: DEBUG oslo_vmware.api [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660108, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.001043] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660099, 'name': ReconfigVM_Task, 'duration_secs': 0.694848} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.001043] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 148eddf4-4c01-47bc-be81-451ca57e7347/148eddf4-4c01-47bc-be81-451ca57e7347.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.001911] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fcb0886-1bb9-4cb9-816e-f549434fb18a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.011933] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 690.011933] env[65758]: value = "task-4660109" [ 690.011933] env[65758]: _type = "Task" [ 690.011933] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.022549] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660109, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.055479] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "03073968-e679-4ce5-9f84-c4765217b308" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.309s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.075264] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.104026] env[65758]: DEBUG nova.objects.base [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Object Instance<83fa942b-a195-4bcb-9ed5-5bb6764220a4> lazy-loaded attributes: info_cache,migration_context {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 690.104026] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6131a69-5105-4f2e-a41c-6d5ae27383cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.130754] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20592f6a-b90d-4900-8b1b-3a9658580eed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.142709] env[65758]: DEBUG oslo_vmware.api [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 690.142709] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f6f1db-8385-6e6f-3a28-9dcd4b29502d" [ 690.142709] env[65758]: _type = "Task" [ 690.142709] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.156602] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660103, 'name': ReconfigVM_Task, 'duration_secs': 0.746357} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.160674] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Reconfigured VM instance instance-0000000d to attach disk [datastore1] volume-34e992c3-1078-4a20-bf5f-0781ef54676c/volume-34e992c3-1078-4a20-bf5f-0781ef54676c.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.166638] env[65758]: DEBUG oslo_vmware.api [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f6f1db-8385-6e6f-3a28-9dcd4b29502d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.167026] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c4416bf-74dc-433c-b8be-084d9fb8e9f2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.198934] env[65758]: WARNING neutronclient.v2_0.client [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 690.199728] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 690.200113] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 690.209009] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.209884] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.210669] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 
tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Waiting for the task: (returnval){ [ 690.210669] env[65758]: value = "task-4660110" [ 690.210669] env[65758]: _type = "Task" [ 690.210669] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.220701] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660104, 'name': CreateVM_Task, 'duration_secs': 0.622909} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.225842] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 690.232551] env[65758]: WARNING neutronclient.v2_0.client [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 690.232551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.232551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.232551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 690.232803] env[65758]: DEBUG oslo_vmware.api [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Task: {'id': task-4660106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151224} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.241065] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0c1ac80-73b0-482d-96b9-da8b78839045 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.243358] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 690.243617] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 690.243835] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.244078] env[65758]: INFO nova.compute.manager [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Took 1.30 seconds to destroy the instance on the hypervisor. [ 690.244416] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 690.253246] env[65758]: DEBUG nova.compute.manager [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 690.253491] env[65758]: DEBUG nova.network.neutron [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 690.253765] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 690.254409] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 690.254761] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 690.262267] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.267759] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 690.267759] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52259fa5-636d-66cd-063a-48fa4abd31c2" [ 690.267759] env[65758]: _type = "Task" [ 690.267759] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.272575] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660107, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.283336] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52259fa5-636d-66cd-063a-48fa4abd31c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.304819] env[65758]: DEBUG oslo_vmware.api [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660108, 'name': PowerOffVM_Task, 'duration_secs': 0.291205} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.307140] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 690.307327] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 690.309219] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e23f5c4-8a5c-451f-b42b-0d8f79c7873c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.391272] env[65758]: DEBUG nova.network.neutron [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 690.391272] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 690.482297] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Updated VIF entry in instance network info cache for port cd42e1b4-a8b0-4277-8fe9-1ea960c938ce. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 690.482297] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Updating instance_info_cache with network_info: [{"id": "cd42e1b4-a8b0-4277-8fe9-1ea960c938ce", "address": "fa:16:3e:05:01:3d", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd42e1b4-a8", "ovs_interfaceid": "cd42e1b4-a8b0-4277-8fe9-1ea960c938ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 690.500411] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 690.500707] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 690.500707] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Deleting the datastore file [datastore2] 483765b5-c63c-4aac-9082-519bbc4e6eb5 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 690.501071] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56d686b0-8382-43d1-b2b1-df369589d12d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.511300] env[65758]: DEBUG oslo_vmware.api [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 690.511300] env[65758]: value = "task-4660112" [ 690.511300] env[65758]: _type = "Task" [ 690.511300] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.520098] env[65758]: WARNING neutronclient.v2_0.client [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 690.521024] env[65758]: WARNING openstack [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 690.521232] env[65758]: WARNING openstack [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 690.539341] env[65758]: DEBUG oslo_vmware.api [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660112, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.547455] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660109, 'name': Rename_Task, 'duration_secs': 0.479658} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.552743] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.552743] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4214d5c3-09a5-4169-b457-c08365492c7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.569299] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 690.569299] env[65758]: value = "task-4660113" [ 690.569299] env[65758]: _type = "Task" [ 690.569299] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.570666] env[65758]: WARNING neutronclient.v2_0.client [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 690.571541] env[65758]: WARNING openstack [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 690.572219] env[65758]: WARNING openstack [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 690.607020] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660113, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.656337] env[65758]: DEBUG oslo_vmware.api [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f6f1db-8385-6e6f-3a28-9dcd4b29502d, 'name': SearchDatastore_Task, 'duration_secs': 0.040223} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.656337] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.711532] env[65758]: DEBUG nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 690.733160] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660110, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.743769] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685071} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.745011] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] b6b673e9-0ae1-4c7c-be53-e83641063cf8/b6b673e9-0ae1-4c7c-be53-e83641063cf8.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 690.745325] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 690.745593] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-892c5074-c8d0-4736-b4f7-7d7948a39848 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.756169] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 690.756169] env[65758]: value = "task-4660114" [ 690.756169] env[65758]: _type = "Task" [ 690.756169] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.773315] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660114, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.790440] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52259fa5-636d-66cd-063a-48fa4abd31c2, 'name': SearchDatastore_Task, 'duration_secs': 0.058274} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.794297] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.794566] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.797022] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.797022] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.797022] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.797022] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5710f39a-bcfc-49b8-8d76-3e995dcb54f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.809981] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.810241] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.811219] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ed4464-b763-4271-a2bc-1bb5cb790635 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.825477] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 690.825477] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529976b8-0f48-58e0-a6b8-76286a1fd1d0" [ 690.825477] env[65758]: _type = "Task" [ 690.825477] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.832735] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b669de-5376-44a0-a152-3d78372386cd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.839212] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529976b8-0f48-58e0-a6b8-76286a1fd1d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.847648] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f566c7-90bd-4e96-b09f-e9d8e49c9bd3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.885758] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f4a6c0-0f47-4bdc-97e3-fadedd16ccca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.895343] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1093d99-b37b-456a-9036-a29367b56e6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.911878] env[65758]: DEBUG nova.network.neutron [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 690.913590] env[65758]: DEBUG nova.compute.provider_tree [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.985527] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Releasing lock "refresh_cache-03073968-e679-4ce5-9f84-c4765217b308" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.986856] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 
148eddf4-4c01-47bc-be81-451ca57e7347] Received event network-vif-plugged-533485bf-4e5b-467a-a80c-4e9867e7efbe {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 690.987162] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquiring lock "148eddf4-4c01-47bc-be81-451ca57e7347-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.987505] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Lock "148eddf4-4c01-47bc-be81-451ca57e7347-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.987774] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Lock "148eddf4-4c01-47bc-be81-451ca57e7347-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.988060] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] No waiting events found dispatching network-vif-plugged-533485bf-4e5b-467a-a80c-4e9867e7efbe {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 690.988255] env[65758]: WARNING nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Received unexpected event network-vif-plugged-533485bf-4e5b-467a-a80c-4e9867e7efbe for instance with vm_state building and task_state spawning. [ 690.988425] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Received event network-changed-533485bf-4e5b-467a-a80c-4e9867e7efbe {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 690.988681] env[65758]: DEBUG nova.compute.manager [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Refreshing instance network info cache due to event network-changed-533485bf-4e5b-467a-a80c-4e9867e7efbe. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 690.988744] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquiring lock "refresh_cache-148eddf4-4c01-47bc-be81-451ca57e7347" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.988876] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Acquired lock "refresh_cache-148eddf4-4c01-47bc-be81-451ca57e7347" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.989760] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Refreshing network info cache for port 533485bf-4e5b-467a-a80c-4e9867e7efbe {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 691.022348] env[65758]: DEBUG oslo_vmware.api [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250078} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.022805] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 691.022805] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 691.022947] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 691.023139] env[65758]: INFO nova.compute.manager [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Took 1.29 seconds to destroy the instance on the hypervisor. [ 691.023409] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 691.023768] env[65758]: DEBUG nova.compute.manager [-] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 691.023768] env[65758]: DEBUG nova.network.neutron [-] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 691.024318] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 691.024571] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 691.024841] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 691.046560] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "974d06c1-2704-4a78-bbd7-f54335c4288e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.046840] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "974d06c1-2704-4a78-bbd7-f54335c4288e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.096543] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660113, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.234935] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660110, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.237462] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.270047] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660114, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.175477} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.270379] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 691.271561] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16805d6b-237e-448c-b264-249b4da03b1c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.302382] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] b6b673e9-0ae1-4c7c-be53-e83641063cf8/b6b673e9-0ae1-4c7c-be53-e83641063cf8.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 691.303118] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7f1a3e7-e397-4e72-b983-80675cf66d86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.331430] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 691.331430] env[65758]: value = "task-4660115" [ 691.331430] env[65758]: _type = "Task" [ 691.331430] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.341036] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529976b8-0f48-58e0-a6b8-76286a1fd1d0, 'name': SearchDatastore_Task, 'duration_secs': 0.028524} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.341304] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c51ccdf0-ec21-4d6b-826c-6f3713bdb94f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.349945] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660115, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.354188] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 691.354188] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f5571d-57db-9896-9d0b-5f8200b00347" [ 691.354188] env[65758]: _type = "Task" [ 691.354188] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.363879] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f5571d-57db-9896-9d0b-5f8200b00347, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.406331] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "e6159a35-f073-4931-b0b0-832a88680356" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.406582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.413391] env[65758]: INFO nova.compute.manager [-] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Took 1.16 seconds to deallocate network for instance. 
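The ExtendVirtualDisk_Task, SearchDatastore_Task and ReconfigVM_Task entries above all follow oslo.vmware's invoke-then-poll pattern: the driver submits a vCenter task, then wait_for_task() polls it until it finishes, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal, illustrative sketch of that pattern follows; the vCenter host, credentials and datastore path are placeholders and none of the values are taken from this run.

```python
# Not part of the log: sketch of the invoke-then-poll pattern behind the
# wait_for_task/_poll_task entries above, using oslo.vmware directly.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',           # placeholder vCenter host
    'administrator@vsphere.local',   # placeholder username
    'secret',                        # placeholder password
    10,                              # API retry count
    0.5)                             # task poll interval in seconds

# invoke_api() issues the SOAP call and returns a task reference;
# wait_for_task() then polls the task (the "_poll_task ... progress is N%"
# lines above) until it reaches the 'success' or 'error' state.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    # placeholder path; a datacenter reference may also be needed
    # depending on how the path is expressed
    name='[datastore1] example-instance/example-instance.vmdk',
    newCapacityKb=1048576,
    eagerZero=False)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task completes
```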
[ 691.417205] env[65758]: DEBUG nova.scheduler.client.report [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.494935] env[65758]: WARNING neutronclient.v2_0.client [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 691.495929] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 691.496730] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 691.560045] env[65758]: DEBUG nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 691.600804] env[65758]: DEBUG oslo_vmware.api [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660113, 'name': PowerOnVM_Task, 'duration_secs': 0.942393} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.601187] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.601459] env[65758]: INFO nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Took 12.05 seconds to spawn the instance on the hypervisor. 
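The "Acquiring lock ... by ...", "acquired ... waited N s" and "released ... held N s" lines throughout this section are emitted by oslo.concurrency's lockutils, which Nova uses both as a decorator (for example around instance_claim and the _locked_do_build_and_run_instance wrappers, logged from lockutils.py:405/410/424) and as a context manager (for example around the cached image vmdk path, logged from lockutils.py:313/316/334). A short sketch of both forms, with placeholder lock names and function bodies:

```python
# Not part of the log: the two lockutils usages that produce the
# "Acquiring lock"/"acquired"/"released" lines above.
from oslo_concurrency import lockutils

# Decorator form: callers are serialised on the named lock, and the
# wait/held durations reported in the log are measured by the wrapper.
@lockutils.synchronized('compute_resources')
def instance_claim():
    # resource accounting protected by the compute_resources lock
    pass

# Context-manager form, e.g. around a cached image vmdk while it is
# copied or extended on the datastore.
with lockutils.lock('[datastore1] devstack-image-cache_base/example.vmdk'):
    # work on the shared datastore path while holding the lock
    pass
```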
[ 691.601666] env[65758]: DEBUG nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 691.602591] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410be807-9f13-4e55-ad4c-645c608e3094 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.735504] env[65758]: DEBUG oslo_vmware.api [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660110, 'name': ReconfigVM_Task, 'duration_secs': 1.233845} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.735709] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909844', 'volume_id': '34e992c3-1078-4a20-bf5f-0781ef54676c', 'name': 'volume-34e992c3-1078-4a20-bf5f-0781ef54676c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e60efbcd-1c4e-40a1-8bc1-893daa511073', 'attached_at': '', 'detached_at': '', 'volume_id': '34e992c3-1078-4a20-bf5f-0781ef54676c', 'serial': '34e992c3-1078-4a20-bf5f-0781ef54676c'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 691.745241] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 691.808825] env[65758]: DEBUG nova.network.neutron [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Updated VIF entry in instance network info cache for port 67c82890-e746-45ff-9f1b-e905834b0064. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 691.809251] env[65758]: DEBUG nova.network.neutron [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Updating instance_info_cache with network_info: [{"id": "67c82890-e746-45ff-9f1b-e905834b0064", "address": "fa:16:3e:21:f7:12", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c82890-e7", "ovs_interfaceid": "67c82890-e746-45ff-9f1b-e905834b0064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 691.844421] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660115, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.851321] env[65758]: DEBUG nova.network.neutron [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Updating instance_info_cache with network_info: [{"id": "13ded2e1-9fb6-4eed-b82c-82509e35b8fb", "address": "fa:16:3e:5b:e3:fa", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13ded2e1-9f", "ovs_interfaceid": "13ded2e1-9fb6-4eed-b82c-82509e35b8fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 691.867768] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f5571d-57db-9896-9d0b-5f8200b00347, 'name': SearchDatastore_Task, 'duration_secs': 0.02407} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.867768] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.868125] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 2bd02c6d-a139-4259-8b28-eed5efc5d094/2bd02c6d-a139-4259-8b28-eed5efc5d094.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 691.869169] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d6367b9-bf90-4679-b41c-a13dbe97b5ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.879428] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 691.879428] env[65758]: value = "task-4660116" [ 691.879428] env[65758]: _type = "Task" [ 691.879428] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.890400] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.909676] env[65758]: DEBUG nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 691.927586] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.928307] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.810s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.928978] env[65758]: DEBUG nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 691.933611] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.094s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.095253] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.132166] env[65758]: INFO nova.compute.manager [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Took 37.23 seconds to build instance. [ 692.316228] env[65758]: DEBUG oslo_concurrency.lockutils [req-f466859c-6868-4e52-ad8e-a4015346a735 req-2f2e37a6-009a-447f-8bd5-50df2322a43e service nova] Releasing lock "refresh_cache-492d1063-8eaf-4207-8d65-341fbc0b6c39" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.349335] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660115, 'name': ReconfigVM_Task, 'duration_secs': 0.599098} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.350400] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Reconfigured VM instance instance-0000001e to attach disk [datastore1] b6b673e9-0ae1-4c7c-be53-e83641063cf8/b6b673e9-0ae1-4c7c-be53-e83641063cf8.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 692.352091] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0edf274f-4c11-4a77-97a5-6c1118b7a7cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.359983] env[65758]: DEBUG oslo_concurrency.lockutils [None req-961e5fb2-4a39-4efa-bf77-93fe0b839ae9 tempest-ServerExternalEventsTest-1604752003 tempest-ServerExternalEventsTest-1604752003-project] Releasing lock "refresh_cache-8eb65797-072b-4a7e-853d-26c0adc51bb2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.364070] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 692.364070] env[65758]: value = "task-4660117" [ 692.364070] env[65758]: _type = "Task" [ 692.364070] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.375044] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660117, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.397037] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660116, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.437166] env[65758]: DEBUG nova.compute.utils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 692.452655] env[65758]: DEBUG nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 692.452998] env[65758]: DEBUG nova.network.neutron [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 692.453370] env[65758]: WARNING neutronclient.v2_0.client [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 692.453684] env[65758]: WARNING neutronclient.v2_0.client [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 692.454370] env[65758]: WARNING openstack [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 692.454740] env[65758]: WARNING openstack [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 692.466090] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.636260] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7f249ae-1674-4fb3-9635-ec28fb7562d3 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "148eddf4-4c01-47bc-be81-451ca57e7347" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.851s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.701934] env[65758]: DEBUG nova.policy [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b6e413458a84a9b8f2b6dcd0061fc33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd550f85853f447bb91a89b6bc6c5720', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 692.731789] env[65758]: WARNING neutronclient.v2_0.client [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 692.733252] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 692.734225] env[65758]: WARNING openstack [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 692.820109] env[65758]: DEBUG nova.objects.instance [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Lazy-loading 'flavor' on Instance uuid e60efbcd-1c4e-40a1-8bc1-893daa511073 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 692.874986] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660117, 'name': Rename_Task, 'duration_secs': 0.253868} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.876215] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 692.877293] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fd5dcf1-f4d6-4041-8adb-66c789a82d6c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.889099] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 692.889099] env[65758]: value = "task-4660118" [ 692.889099] env[65758]: _type = "Task" [ 692.889099] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.897418] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660116, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.825107} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.898209] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 2bd02c6d-a139-4259-8b28-eed5efc5d094/2bd02c6d-a139-4259-8b28-eed5efc5d094.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 692.898645] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 692.898979] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3532a50-5265-4cd5-8600-ef631d5c9468 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.905484] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660118, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.912923] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 692.912923] env[65758]: value = "task-4660119" [ 692.912923] env[65758]: _type = "Task" [ 692.912923] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.928481] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660119, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.963922] env[65758]: DEBUG nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 692.968680] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Applying migration context for instance 83fa942b-a195-4bcb-9ed5-5bb6764220a4 as it has an incoming, in-progress migration 28721f73-0009-4427-b697-d46294cf6cb7. 
Migration status is confirming {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 692.971816] env[65758]: INFO nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating resource usage from migration 28721f73-0009-4427-b697-d46294cf6cb7 [ 693.009900] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 549673ec-3d75-4aad-a001-014f3f53a6b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.009900] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance adc1b956-1b5a-4272-b0ff-95a565e9c45c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.009900] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 37aadd44-79e8-4479-862f-265549c9d802 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.009900] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e48a075b-41b3-4612-bd5f-0a158d707a2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.009900] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 2d787237-26e5-4519-9f6e-1d30b9d016cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.009900] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e60efbcd-1c4e-40a1-8bc1-893daa511073 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.009900] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.010781] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 24379189-b10a-4ef6-a3f6-b7bb43029dab is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 693.010781] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance f7a14628-cc55-41fa-ae89-3958855df8a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.010852] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 1e249ca9-a7a8-440f-832b-a8f5d84ada8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.011468] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 483765b5-c63c-4aac-9082-519bbc4e6eb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.011468] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Migration 28721f73-0009-4427-b697-d46294cf6cb7 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 693.011468] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 83fa942b-a195-4bcb-9ed5-5bb6764220a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.011600] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a2010738-759b-480a-8360-2639788056b1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 693.011739] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 0addcbb1-3561-4c93-b714-37e6b613b962 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.011857] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 24016efd-cdb3-4c1e-9c08-8643400e729e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.011968] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 8eb65797-072b-4a7e-853d-26c0adc51bb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.012164] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 9e007d55-0a5c-4469-a546-9b18e188bea0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.012335] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 492d1063-8eaf-4207-8d65-341fbc0b6c39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.012479] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 03073968-e679-4ce5-9f84-c4765217b308 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.012652] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 148eddf4-4c01-47bc-be81-451ca57e7347 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.012806] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance b6b673e9-0ae1-4c7c-be53-e83641063cf8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.012963] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 2bd02c6d-a139-4259-8b28-eed5efc5d094 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.014893] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 693.195795] env[65758]: DEBUG nova.network.neutron [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Successfully created port: b4a1d8f6-c296-4e9a-9582-489f0ebc77a6 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 693.200550] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Updated VIF entry in instance network info cache for port 533485bf-4e5b-467a-a80c-4e9867e7efbe. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 693.200916] env[65758]: DEBUG nova.network.neutron [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Updating instance_info_cache with network_info: [{"id": "533485bf-4e5b-467a-a80c-4e9867e7efbe", "address": "fa:16:3e:28:16:66", "network": {"id": "a1ba14ee-33b9-4620-87b2-e8754cd21a0a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-268262662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc1b1a2357a4f34b1093150b27de587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap533485bf-4e", "ovs_interfaceid": "533485bf-4e5b-467a-a80c-4e9867e7efbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 693.328237] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47c63908-9832-4b9a-93ee-d6d60c7eaee9 tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.626s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.401391] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660118, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.425425] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072363} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.425838] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 693.426872] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6e738f-b4f3-42d2-acab-a8d244ddfe1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.456684] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 2bd02c6d-a139-4259-8b28-eed5efc5d094/2bd02c6d-a139-4259-8b28-eed5efc5d094.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 693.457081] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5052f06-125c-4cf5-b194-5b716a38457e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.492070] env[65758]: DEBUG nova.network.neutron [-] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 693.493175] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 693.493175] env[65758]: value = "task-4660120" [ 693.493175] env[65758]: _type = "Task" [ 693.493175] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.496859] env[65758]: DEBUG nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Received event network-vif-plugged-3e23d45e-b849-47dd-9649-500080939b87 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 693.497068] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Acquiring lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.497271] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.497427] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.497580] env[65758]: DEBUG nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] No waiting events found dispatching network-vif-plugged-3e23d45e-b849-47dd-9649-500080939b87 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 693.497729] env[65758]: WARNING nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Received unexpected event network-vif-plugged-3e23d45e-b849-47dd-9649-500080939b87 for instance with vm_state building and task_state spawning. [ 693.497875] env[65758]: DEBUG nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Received event network-changed-3e23d45e-b849-47dd-9649-500080939b87 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 693.498025] env[65758]: DEBUG nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Refreshing instance network info cache due to event network-changed-3e23d45e-b849-47dd-9649-500080939b87. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 693.498199] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Acquiring lock "refresh_cache-b6b673e9-0ae1-4c7c-be53-e83641063cf8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.498328] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Acquired lock "refresh_cache-b6b673e9-0ae1-4c7c-be53-e83641063cf8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.498475] env[65758]: DEBUG nova.network.neutron [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Refreshing network info cache for port 3e23d45e-b849-47dd-9649-500080939b87 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 693.512850] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660120, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.520307] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ec1e2845-e73a-40ff-9b6c-1d8281859fba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 693.704321] env[65758]: DEBUG oslo_concurrency.lockutils [req-2c6afd63-78d1-4c99-b1b9-821e44850007 req-bb407bb0-cd32-46eb-8875-ed56111b7003 service nova] Releasing lock "refresh_cache-148eddf4-4c01-47bc-be81-451ca57e7347" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.790518] env[65758]: DEBUG nova.compute.manager [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Received event network-changed-872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 693.793326] env[65758]: DEBUG nova.compute.manager [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Refreshing instance network info cache due to event network-changed-872949b5-9bac-4f83-acec-93e23be464c5. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 693.793326] env[65758]: DEBUG oslo_concurrency.lockutils [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] Acquiring lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.793326] env[65758]: DEBUG oslo_concurrency.lockutils [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] Acquired lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.793326] env[65758]: DEBUG nova.network.neutron [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Refreshing network info cache for port 872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 693.904951] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660118, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.981504] env[65758]: DEBUG nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 693.995497] env[65758]: INFO nova.compute.manager [-] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Took 2.97 seconds to deallocate network for instance. [ 694.008147] env[65758]: WARNING neutronclient.v2_0.client [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 694.011022] env[65758]: WARNING openstack [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 694.011022] env[65758]: WARNING openstack [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 694.021686] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance b7692c74-c919-45b4-991b-c06a530ff9ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 694.031800] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660120, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.035885] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 694.037064] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 694.037705] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 694.037705] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 694.037832] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 694.038409] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 694.038409] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 694.038409] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 694.038650] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 694.038863] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 694.039338] env[65758]: DEBUG nova.virt.hardware [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 694.040687] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac3ca92-80fb-405e-ae5f-f047b6ab8756 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.062839] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5708b914-9545-4487-8eaa-0bb373ace302 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.105648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquiring lock "8eb65797-072b-4a7e-853d-26c0adc51bb2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.105648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "8eb65797-072b-4a7e-853d-26c0adc51bb2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.105648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquiring lock "8eb65797-072b-4a7e-853d-26c0adc51bb2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.105648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock 
"8eb65797-072b-4a7e-853d-26c0adc51bb2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.105648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "8eb65797-072b-4a7e-853d-26c0adc51bb2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.108049] env[65758]: INFO nova.compute.manager [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Terminating instance [ 694.147039] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.147397] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.296788] env[65758]: WARNING neutronclient.v2_0.client [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 694.296788] env[65758]: WARNING openstack [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 694.296945] env[65758]: WARNING openstack [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 694.402554] env[65758]: DEBUG oslo_vmware.api [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660118, 'name': PowerOnVM_Task, 'duration_secs': 1.494895} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.403231] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 694.403231] env[65758]: INFO nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Took 12.27 seconds to spawn the instance on the hypervisor. [ 694.403485] env[65758]: DEBUG nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 694.405487] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f407ec87-5ead-462e-b7bd-324da17a92e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.506549] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660120, 'name': ReconfigVM_Task, 'duration_secs': 0.623209} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.506880] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 2bd02c6d-a139-4259-8b28-eed5efc5d094/2bd02c6d-a139-4259-8b28-eed5efc5d094.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 694.507553] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-328d86c5-d12a-4c25-8309-bfde2afbd3c8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.510010] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.516665] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 694.516665] env[65758]: value = "task-4660121" [ 694.516665] env[65758]: _type = "Task" [ 694.516665] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.526129] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660121, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.534121] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance de8f3600-b25f-4396-af37-ea703587979c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 694.552953] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "24016efd-cdb3-4c1e-9c08-8643400e729e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.553223] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "24016efd-cdb3-4c1e-9c08-8643400e729e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.553433] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "24016efd-cdb3-4c1e-9c08-8643400e729e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.553618] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "24016efd-cdb3-4c1e-9c08-8643400e729e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.553767] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "24016efd-cdb3-4c1e-9c08-8643400e729e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.556268] env[65758]: INFO nova.compute.manager [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 
24016efd-cdb3-4c1e-9c08-8643400e729e] Terminating instance [ 694.617241] env[65758]: DEBUG nova.compute.manager [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 694.617819] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.619011] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2cae4d1-afc7-482c-8d9b-62a1634eaeca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.629967] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 694.629967] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fa5d945-7cfc-45ef-bf0e-ab9de6d4fe84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.637827] env[65758]: DEBUG oslo_vmware.api [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 694.637827] env[65758]: value = "task-4660122" [ 694.637827] env[65758]: _type = "Task" [ 694.637827] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.649483] env[65758]: DEBUG oslo_vmware.api [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660122, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.649830] env[65758]: DEBUG nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 694.889913] env[65758]: DEBUG nova.network.neutron [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Successfully updated port: b4a1d8f6-c296-4e9a-9582-489f0ebc77a6 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 694.924608] env[65758]: WARNING neutronclient.v2_0.client [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 694.925301] env[65758]: WARNING openstack [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 694.925727] env[65758]: WARNING openstack [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 694.934477] env[65758]: INFO nova.compute.manager [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Took 36.82 seconds to build instance. [ 695.037345] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 695.044761] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660121, 'name': Rename_Task, 'duration_secs': 0.189363} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.045391] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 695.045648] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e71a64cd-8493-45e9-b8f7-1ceb81b93c16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.053803] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 695.053803] env[65758]: value = "task-4660123" [ 695.053803] env[65758]: _type = "Task" [ 695.053803] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.059366] env[65758]: DEBUG nova.compute.manager [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 695.059614] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.064365] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818c4588-60a8-4657-881e-1984041dbd86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.068817] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660123, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.074275] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 695.074550] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ffc37ae-7bff-4c19-a283-b95fb3177aa5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.081743] env[65758]: DEBUG oslo_vmware.api [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 695.081743] env[65758]: value = "task-4660124" [ 695.081743] env[65758]: _type = "Task" [ 695.081743] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.084052] env[65758]: DEBUG nova.network.neutron [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Updated VIF entry in instance network info cache for port 3e23d45e-b849-47dd-9649-500080939b87. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 695.084412] env[65758]: DEBUG nova.network.neutron [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Updating instance_info_cache with network_info: [{"id": "3e23d45e-b849-47dd-9649-500080939b87", "address": "fa:16:3e:2a:7d:15", "network": {"id": "e9c8c55e-58ed-4ac8-bd45-2e7dc91beb47", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1571509614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df8b333d08c54634b617dd8284143beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e23d45e-b8", "ovs_interfaceid": "3e23d45e-b849-47dd-9649-500080939b87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 695.096876] env[65758]: DEBUG oslo_vmware.api [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660124, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.110650] env[65758]: WARNING neutronclient.v2_0.client [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 695.111420] env[65758]: WARNING openstack [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 695.111760] env[65758]: WARNING openstack [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 695.150718] env[65758]: DEBUG oslo_vmware.api [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660122, 'name': PowerOffVM_Task, 'duration_secs': 0.365204} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.150718] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.150790] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.151029] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0f2d65e-b3e6-43df-957d-f0076d02ee28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.183618] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.223018] env[65758]: DEBUG nova.network.neutron [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Updated VIF entry in instance network info cache for port 872949b5-9bac-4f83-acec-93e23be464c5. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 695.223018] env[65758]: DEBUG nova.network.neutron [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Updating instance_info_cache with network_info: [{"id": "872949b5-9bac-4f83-acec-93e23be464c5", "address": "fa:16:3e:f8:7d:e0", "network": {"id": "115e8c49-6d73-405e-a185-9072fb560eb2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1829868231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ca73ea9954543e38b16a12b37d531c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap872949b5-9b", "ovs_interfaceid": "872949b5-9bac-4f83-acec-93e23be464c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 695.235075] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.235325] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.235594] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Deleting the datastore file [datastore1] 8eb65797-072b-4a7e-853d-26c0adc51bb2 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.235912] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05ab3252-9382-46a9-bb67-b741b0586fef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.245893] env[65758]: DEBUG oslo_vmware.api [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for the task: (returnval){ [ 695.245893] env[65758]: value = "task-4660126" [ 695.245893] env[65758]: _type = "Task" [ 695.245893] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.255613] env[65758]: DEBUG oslo_vmware.api [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660126, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.394214] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.394427] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.394721] env[65758]: DEBUG nova.network.neutron [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 695.437042] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf3d862a-300f-40ad-b512-66a59bb99a9c tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.330s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.546485] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 974d06c1-2704-4a78-bbd7-f54335c4288e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 695.568863] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660123, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.591759] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Releasing lock "refresh_cache-b6b673e9-0ae1-4c7c-be53-e83641063cf8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.592826] env[65758]: DEBUG nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Received event network-vif-plugged-9f2a1cde-126e-4502-835b-0bb1647edbfe {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 695.592826] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Acquiring lock "2bd02c6d-a139-4259-8b28-eed5efc5d094-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.592826] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Lock "2bd02c6d-a139-4259-8b28-eed5efc5d094-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.592975] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Lock "2bd02c6d-a139-4259-8b28-eed5efc5d094-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.593336] env[65758]: DEBUG nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] No waiting events found dispatching network-vif-plugged-9f2a1cde-126e-4502-835b-0bb1647edbfe {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 695.593573] env[65758]: WARNING nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Received unexpected event network-vif-plugged-9f2a1cde-126e-4502-835b-0bb1647edbfe for instance with vm_state building and task_state spawning. [ 695.593801] env[65758]: DEBUG nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Received event network-changed-9f2a1cde-126e-4502-835b-0bb1647edbfe {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 695.593970] env[65758]: DEBUG nova.compute.manager [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Refreshing instance network info cache due to event network-changed-9f2a1cde-126e-4502-835b-0bb1647edbfe. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 695.594174] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Acquiring lock "refresh_cache-2bd02c6d-a139-4259-8b28-eed5efc5d094" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.594311] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Acquired lock "refresh_cache-2bd02c6d-a139-4259-8b28-eed5efc5d094" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.594463] env[65758]: DEBUG nova.network.neutron [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Refreshing network info cache for port 9f2a1cde-126e-4502-835b-0bb1647edbfe {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 695.602740] env[65758]: DEBUG oslo_vmware.api [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660124, 'name': PowerOffVM_Task, 'duration_secs': 0.300787} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.603352] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.603794] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.604424] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52e7766d-3d25-4697-91c8-490933f58b86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.688343] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.688605] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.688812] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Deleting the datastore file [datastore2] 
24016efd-cdb3-4c1e-9c08-8643400e729e {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.689154] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71658734-59a2-442f-b1da-9ccabcdbad06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.697965] env[65758]: DEBUG oslo_vmware.api [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for the task: (returnval){ [ 695.697965] env[65758]: value = "task-4660128" [ 695.697965] env[65758]: _type = "Task" [ 695.697965] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.711887] env[65758]: DEBUG oslo_vmware.api [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660128, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.725985] env[65758]: DEBUG oslo_concurrency.lockutils [req-77b3e16d-d965-42d2-aa67-1d81e548d86f req-7e70f573-5323-4f9e-acd8-440349dbcdae service nova] Releasing lock "refresh_cache-0addcbb1-3561-4c93-b714-37e6b613b962" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.759101] env[65758]: DEBUG oslo_vmware.api [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Task: {'id': task-4660126, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396307} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.759686] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 695.760605] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 695.760989] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.761353] env[65758]: INFO nova.compute.manager [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 695.761889] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 695.762259] env[65758]: DEBUG nova.compute.manager [-] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 695.762492] env[65758]: DEBUG nova.network.neutron [-] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 695.762916] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 695.763675] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 695.764248] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 695.826372] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 695.898864] env[65758]: WARNING openstack [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 695.901126] env[65758]: WARNING openstack [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 696.052437] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e6159a35-f073-4931-b0b0-832a88680356 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 696.052775] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 22 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 696.052948] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4800MB phys_disk=100GB used_disk=22GB total_vcpus=48 used_vcpus=22 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '21', 'num_vm_active': '15', 'num_task_None': '18', 'num_os_type_None': '21', 'num_proj_d999e5f3384e4a24ad9ec68b2fa3fda7': '2', 'io_workload': '3', 'num_vm_resized': '1', 'num_proj_cdaabf2897064b5a948dbdb6d5921d76': '1', 'num_proj_06aa7ad9cf4f4f528687bbd3e6d12b0d': '1', 'num_proj_e114eef3998848699a9a086fee86db29': '1', 'num_proj_e5fc14c9e85d404a8a6db0167ac84491': '1', 'num_proj_9aaf5b39abda42f28a847d5fe0d0ecec': '3', 'num_proj_d100ba970de24698aff03c4c537b3c18': '1', 'num_vm_suspended': '1', 'num_proj_45aad313d10447e9ba61ed0a05b915ba': '1', 'num_task_deleting': '1', 'num_proj_237226a477354874a363a8670187a1a9': '1', 'num_proj_9ca73ea9954543e38b16a12b37d531c6': '1', 'num_vm_error': '1', 'num_proj_aab1df827abb49b88b951d30ba485d39': '1', 'num_proj_2030cc491d604d46bda3753f5a3485a5': '1', 'num_proj_efc1b1a2357a4f34b1093150b27de587': '3', 'num_vm_building': '3', 'num_task_spawning': '2', 'num_proj_df8b333d08c54634b617dd8284143beb': '1', 'num_proj_c18712cd526b4e25b07140cb554b04d0': '1', 'num_proj_fd550f85853f447bb91a89b6bc6c5720': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 696.067048] env[65758]: DEBUG oslo_vmware.api [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660123, 'name': PowerOnVM_Task, 'duration_secs': 0.909987} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.067366] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 696.067564] env[65758]: INFO nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Took 11.28 seconds to spawn the instance on the hypervisor. 
[ 696.067810] env[65758]: DEBUG nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 696.068752] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ab337e-30b9-45b7-b42d-b017ec1d8e6c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.098683] env[65758]: WARNING neutronclient.v2_0.client [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 696.099737] env[65758]: WARNING openstack [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 696.099737] env[65758]: WARNING openstack [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 696.155288] env[65758]: DEBUG nova.network.neutron [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 696.211245] env[65758]: DEBUG oslo_vmware.api [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Task: {'id': task-4660128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281152} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.211245] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 696.211245] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 696.211245] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 696.211555] env[65758]: INFO nova.compute.manager [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 696.211626] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 696.211821] env[65758]: DEBUG nova.compute.manager [-] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 696.213030] env[65758]: DEBUG nova.network.neutron [-] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 696.213030] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 696.213030] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 696.213229] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 696.299056] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 696.343964] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "54db018a-d54c-4fe5-9a6e-600e801e00b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.344233] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "54db018a-d54c-4fe5-9a6e-600e801e00b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.350999] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "cca3e019-8e82-4473-8609-291703762a6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.351344] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "cca3e019-8e82-4473-8609-291703762a6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.590983] env[65758]: INFO nova.compute.manager [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Took 29.54 seconds to build instance. 
[ 696.628605] env[65758]: DEBUG nova.network.neutron [-] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 696.678297] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35516330-747a-445d-9940-e1ee5da26dab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.691910] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7a043a-e021-48fa-b028-46617c034f04 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.729244] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e49ca2-c952-4e14-8946-1974c32e4ae5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.739078] env[65758]: WARNING neutronclient.v2_0.client [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 696.739824] env[65758]: WARNING openstack [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 696.740240] env[65758]: WARNING openstack [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 696.749033] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8803b600-be48-4e57-bfbb-d9d866574e27 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.765187] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.853641] env[65758]: DEBUG nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 696.863094] env[65758]: WARNING neutronclient.v2_0.client [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 696.864177] env[65758]: WARNING openstack [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 696.866347] env[65758]: WARNING openstack [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 697.041023] env[65758]: DEBUG nova.network.neutron [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Updating instance_info_cache with network_info: [{"id": "b4a1d8f6-c296-4e9a-9582-489f0ebc77a6", "address": "fa:16:3e:64:7a:dd", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a1d8f6-c2", "ovs_interfaceid": "b4a1d8f6-c296-4e9a-9582-489f0ebc77a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 697.096080] env[65758]: DEBUG oslo_concurrency.lockutils [None req-70414e93-1f23-438a-8ae3-eb66ce7cfee8 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "2bd02c6d-a139-4259-8b28-eed5efc5d094" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.946s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.131458] env[65758]: INFO nova.compute.manager [-] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Took 1.37 seconds to deallocate network for instance. 
[ 697.156601] env[65758]: DEBUG nova.network.neutron [-] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 697.174269] env[65758]: DEBUG nova.network.neutron [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Updated VIF entry in instance network info cache for port 9f2a1cde-126e-4502-835b-0bb1647edbfe. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 697.174669] env[65758]: DEBUG nova.network.neutron [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Updating instance_info_cache with network_info: [{"id": "9f2a1cde-126e-4502-835b-0bb1647edbfe", "address": "fa:16:3e:07:42:48", "network": {"id": "0179f052-13e6-4824-928b-95f34f098cfe", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2018208707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c18712cd526b4e25b07140cb554b04d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd998416-f3d6-4a62-b828-5011063ce76a", "external-id": "nsx-vlan-transportzone-57", "segmentation_id": 57, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2a1cde-12", "ovs_interfaceid": "9f2a1cde-126e-4502-835b-0bb1647edbfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 697.270933] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.384082] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.544631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.545440] env[65758]: DEBUG nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Instance network_info: |[{"id": "b4a1d8f6-c296-4e9a-9582-489f0ebc77a6", "address": "fa:16:3e:64:7a:dd", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a1d8f6-c2", "ovs_interfaceid": "b4a1d8f6-c296-4e9a-9582-489f0ebc77a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 697.545614] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:7a:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4a1d8f6-c296-4e9a-9582-489f0ebc77a6', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 697.552989] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 697.553661] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 697.554421] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb4f18ad-060c-4625-a713-f1665c5cb916 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.574155] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 697.574155] env[65758]: value = "task-4660129" [ 697.574155] env[65758]: _type = "Task" [ 697.574155] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.583877] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660129, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.599480] env[65758]: DEBUG nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 697.641236] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.661881] env[65758]: INFO nova.compute.manager [-] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Took 1.45 seconds to deallocate network for instance. [ 697.678750] env[65758]: DEBUG oslo_concurrency.lockutils [req-ecf27a80-5962-4a6a-a45f-0bd59cd79025 req-974a91c8-3006-4800-a44b-eda88c971ee1 service nova] Releasing lock "refresh_cache-2bd02c6d-a139-4259-8b28-eed5efc5d094" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.777417] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 697.777685] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.845s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.777971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.756s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.779528] env[65758]: INFO nova.compute.claims [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.088255] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660129, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.128483] env[65758]: DEBUG nova.compute.manager [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: a2010738-759b-480a-8360-2639788056b1] Received event network-vif-deleted-acfd4b22-8660-461e-9e71-fa7134a7936e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 698.128483] env[65758]: DEBUG nova.compute.manager [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-vif-deleted-acba5b06-e536-4848-be4e-db877af4d6ac {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 698.129596] env[65758]: DEBUG nova.compute.manager [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-vif-deleted-d0a812a9-0121-493b-92c4-16221a927a6a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 698.129596] env[65758]: DEBUG nova.compute.manager [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Received event network-vif-deleted-026141b1-3811-4baa-8195-d418fa316270 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 698.129596] env[65758]: DEBUG nova.compute.manager [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Received event network-vif-plugged-b4a1d8f6-c296-4e9a-9582-489f0ebc77a6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 698.129596] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Acquiring lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.129754] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.129805] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.129961] env[65758]: DEBUG nova.compute.manager [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] No waiting events found dispatching network-vif-plugged-b4a1d8f6-c296-4e9a-9582-489f0ebc77a6 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 698.131033] env[65758]: WARNING nova.compute.manager 
[req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Received unexpected event network-vif-plugged-b4a1d8f6-c296-4e9a-9582-489f0ebc77a6 for instance with vm_state building and task_state spawning. [ 698.131033] env[65758]: DEBUG nova.compute.manager [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Received event network-changed-b4a1d8f6-c296-4e9a-9582-489f0ebc77a6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 698.131033] env[65758]: DEBUG nova.compute.manager [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Refreshing instance network info cache due to event network-changed-b4a1d8f6-c296-4e9a-9582-489f0ebc77a6. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 698.131446] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Acquiring lock "refresh_cache-8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.131446] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Acquired lock "refresh_cache-8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.131446] env[65758]: DEBUG nova.network.neutron [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Refreshing network info cache for port b4a1d8f6-c296-4e9a-9582-489f0ebc77a6 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 698.143150] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.170435] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.589962] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660129, 'name': CreateVM_Task, 'duration_secs': 0.830394} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.590453] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 698.591114] env[65758]: WARNING neutronclient.v2_0.client [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 698.591114] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.591264] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.591559] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 698.591996] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7762272b-2959-4297-86e7-6507a231b196 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.598355] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 698.598355] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524a75da-8da4-bdf9-ee43-9ca70f86c2f9" [ 698.598355] env[65758]: _type = "Task" [ 698.598355] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.613656] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a75da-8da4-bdf9-ee43-9ca70f86c2f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.635727] env[65758]: WARNING neutronclient.v2_0.client [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 698.636672] env[65758]: WARNING openstack [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 698.637217] env[65758]: WARNING openstack [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 698.887287] env[65758]: WARNING neutronclient.v2_0.client [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 698.888169] env[65758]: WARNING openstack [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 698.888335] env[65758]: WARNING openstack [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 698.904521] env[65758]: DEBUG nova.compute.manager [req-10d99577-6395-4ce3-bbfd-2ac59e292169 req-ca03172f-0d5f-4683-8b35-535edaed2e87 service nova] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Received event network-vif-deleted-8c662ce6-206b-49ce-836c-0bbc9792f182 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 699.086768] env[65758]: DEBUG nova.network.neutron [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Updated VIF entry in instance network info cache for port b4a1d8f6-c296-4e9a-9582-489f0ebc77a6. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 699.086768] env[65758]: DEBUG nova.network.neutron [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Updating instance_info_cache with network_info: [{"id": "b4a1d8f6-c296-4e9a-9582-489f0ebc77a6", "address": "fa:16:3e:64:7a:dd", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a1d8f6-c2", "ovs_interfaceid": "b4a1d8f6-c296-4e9a-9582-489f0ebc77a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 699.113803] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a75da-8da4-bdf9-ee43-9ca70f86c2f9, 'name': SearchDatastore_Task, 'duration_secs': 0.013398} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.113803] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.114365] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 699.114365] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.114365] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.114657] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 699.114884] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3712fc41-b739-4ed9-a707-8e5fb9ad47b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.129045] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 699.129045] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 699.130673] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28979da0-1bec-4aaf-b145-91bf24b74a90 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.139220] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 699.139220] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5226224f-7382-7560-2486-e808322c24d5" [ 699.139220] env[65758]: _type = "Task" [ 699.139220] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.155307] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5226224f-7382-7560-2486-e808322c24d5, 'name': SearchDatastore_Task, 'duration_secs': 0.010939} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.159313] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6192bbd5-0ca5-4f00-9fc2-1db5ce3a2462 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.168215] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 699.168215] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c9c42c-b197-62a7-36e1-5c07cae21dc6" [ 699.168215] env[65758]: _type = "Task" [ 699.168215] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.180701] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c9c42c-b197-62a7-36e1-5c07cae21dc6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.426119] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c080f332-305c-48fc-9897-e80797443fc1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.440421] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8b3ce5-d3e1-4064-a1dd-5d0b20165ace {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.482970] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dc6fca-9d0b-4052-b419-8fb6fb7d37fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.494280] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5790e2-1455-4ff9-9709-e3deb04a6cfe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.511023] env[65758]: DEBUG nova.compute.provider_tree [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.591771] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ee887a3-bb15-4f76-9d94-a8f577d746c6 req-e98a364c-6142-4e9a-9ef3-7c5569b75d1d service nova] Releasing lock "refresh_cache-8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.678126] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c9c42c-b197-62a7-36e1-5c07cae21dc6, 'name': SearchDatastore_Task, 'duration_secs': 0.042393} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.679499] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.680365] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 699.681115] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c34c4a14-4f5f-4d04-817f-4f1585b6f12b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.692155] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 699.692155] env[65758]: value = "task-4660130" [ 699.692155] env[65758]: _type = "Task" [ 699.692155] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.705630] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660130, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.951259] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Acquiring lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.951259] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.014559] env[65758]: DEBUG nova.scheduler.client.report [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.054813] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquiring lock "2bd02c6d-a139-4259-8b28-eed5efc5d094" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.055478] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "2bd02c6d-a139-4259-8b28-eed5efc5d094" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.055749] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquiring lock "2bd02c6d-a139-4259-8b28-eed5efc5d094-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.056097] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "2bd02c6d-a139-4259-8b28-eed5efc5d094-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.056348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "2bd02c6d-a139-4259-8b28-eed5efc5d094-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.062518] env[65758]: INFO nova.compute.manager [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Terminating instance [ 700.204858] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660130, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.456570] env[65758]: INFO nova.compute.manager [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Detaching volume 34e992c3-1078-4a20-bf5f-0781ef54676c [ 700.509963] env[65758]: INFO nova.virt.block_device [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Attempting to driver detach volume 34e992c3-1078-4a20-bf5f-0781ef54676c from mountpoint /dev/sdb [ 700.510223] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 700.510406] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909844', 'volume_id': '34e992c3-1078-4a20-bf5f-0781ef54676c', 'name': 'volume-34e992c3-1078-4a20-bf5f-0781ef54676c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e60efbcd-1c4e-40a1-8bc1-893daa511073', 'attached_at': '', 'detached_at': '', 'volume_id': '34e992c3-1078-4a20-bf5f-0781ef54676c', 'serial': '34e992c3-1078-4a20-bf5f-0781ef54676c'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 700.512381] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcab053-74b8-4993-892a-f224d0d4a52e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.535880] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.758s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.536446] env[65758]: DEBUG nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 700.539170] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.154s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.540612] env[65758]: INFO nova.compute.claims [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.543961] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ddb438-3dd6-40a9-950b-95ed61ff56c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.558102] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7176d62f-e096-4a3d-b43b-3be4babba116 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.585734] env[65758]: DEBUG nova.compute.manager [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 700.585734] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.587227] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d14b89-b36b-42d4-b2d4-2fce78355aaf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.591162] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd5eb2a-cfca-4867-8bab-f5683647f8eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.611921] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] The volume has not been displaced from its original location: [datastore1] volume-34e992c3-1078-4a20-bf5f-0781ef54676c/volume-34e992c3-1078-4a20-bf5f-0781ef54676c.vmdk. No consolidation needed. 
{{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 700.617395] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Reconfiguring VM instance instance-0000000d to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 700.620492] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40224160-3348-40d0-8d3a-151607c722dd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.634348] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 700.635038] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f26276c-3b47-4dfd-9012-03a85b002708 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.644243] env[65758]: DEBUG oslo_vmware.api [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 700.644243] env[65758]: value = "task-4660131" [ 700.644243] env[65758]: _type = "Task" [ 700.644243] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.645611] env[65758]: DEBUG oslo_vmware.api [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Waiting for the task: (returnval){ [ 700.645611] env[65758]: value = "task-4660132" [ 700.645611] env[65758]: _type = "Task" [ 700.645611] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.659727] env[65758]: DEBUG oslo_vmware.api [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660132, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.663914] env[65758]: DEBUG oslo_vmware.api [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660131, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.705361] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531673} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.705434] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 700.705639] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 700.705905] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3bed8ea-a3bc-406a-bb4a-3ac1d45a868e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.714491] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 700.714491] env[65758]: value = "task-4660133" [ 700.714491] env[65758]: _type = "Task" [ 700.714491] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.724293] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660133, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.055962] env[65758]: DEBUG nova.compute.utils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 701.055962] env[65758]: DEBUG nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 701.056494] env[65758]: DEBUG nova.network.neutron [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 701.057197] env[65758]: WARNING neutronclient.v2_0.client [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 701.058878] env[65758]: WARNING neutronclient.v2_0.client [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 701.060389] env[65758]: WARNING openstack [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 701.060389] env[65758]: WARNING openstack [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 701.164727] env[65758]: DEBUG oslo_vmware.api [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660131, 'name': PowerOffVM_Task, 'duration_secs': 0.21685} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.172484] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 701.172484] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 701.173055] env[65758]: DEBUG oslo_vmware.api [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660132, 'name': ReconfigVM_Task, 'duration_secs': 0.457422} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.173197] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f246beb2-a866-4e7e-8602-b470963c160c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.176932] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Reconfigured VM instance instance-0000000d to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 701.183099] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e64c4ebb-3179-4813-aced-1d173ea1e47d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.200018] env[65758]: DEBUG oslo_vmware.api [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Waiting for the task: (returnval){ [ 701.200018] env[65758]: value = "task-4660135" [ 701.200018] env[65758]: _type = "Task" [ 701.200018] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.211701] env[65758]: DEBUG oslo_vmware.api [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660135, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.225122] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660133, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084127} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.225465] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 701.226782] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef447358-7ea2-42bb-9dd0-720f0d0b39da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.251982] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 701.252354] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-207b5167-0182-48df-9140-95b92ceeebdc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.270131] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 701.270358] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 701.270568] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Deleting the datastore file [datastore1] 2bd02c6d-a139-4259-8b28-eed5efc5d094 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 701.271231] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7f60e02-96a1-41fc-9b6a-849b5c4d1757 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.276715] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 701.276715] env[65758]: value = "task-4660136" [ 701.276715] env[65758]: _type = "Task" [ 701.276715] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.285046] env[65758]: DEBUG oslo_vmware.api [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for the task: (returnval){ [ 701.285046] env[65758]: value = "task-4660137" [ 701.285046] env[65758]: _type = "Task" [ 701.285046] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.288619] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660136, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.298927] env[65758]: DEBUG oslo_vmware.api [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660137, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.378884] env[65758]: DEBUG nova.policy [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '638c09153b9e4ec09f9fb5f87ba1e0ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '693b129cd84f4eee9971e7221e92c3e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 701.571057] env[65758]: DEBUG nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 701.633447] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "492d1063-8eaf-4207-8d65-341fbc0b6c39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.633812] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "492d1063-8eaf-4207-8d65-341fbc0b6c39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.634091] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "492d1063-8eaf-4207-8d65-341fbc0b6c39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.634318] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "492d1063-8eaf-4207-8d65-341fbc0b6c39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.634482] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "492d1063-8eaf-4207-8d65-341fbc0b6c39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.638425] env[65758]: INFO nova.compute.manager [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Terminating instance [ 701.713314] env[65758]: DEBUG oslo_vmware.api [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Task: {'id': task-4660135, 'name': ReconfigVM_Task, 'duration_secs': 0.277393} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.717635] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909844', 'volume_id': '34e992c3-1078-4a20-bf5f-0781ef54676c', 'name': 'volume-34e992c3-1078-4a20-bf5f-0781ef54676c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e60efbcd-1c4e-40a1-8bc1-893daa511073', 'attached_at': '', 'detached_at': '', 'volume_id': '34e992c3-1078-4a20-bf5f-0781ef54676c', 'serial': '34e992c3-1078-4a20-bf5f-0781ef54676c'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 701.791389] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660136, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.800728] env[65758]: DEBUG oslo_vmware.api [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Task: {'id': task-4660137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240285} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.804832] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.805056] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 701.805236] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 701.805402] env[65758]: INFO nova.compute.manager [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Took 1.22 seconds to destroy the instance on the hypervisor. [ 701.805648] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 701.809511] env[65758]: DEBUG nova.compute.manager [-] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 701.809511] env[65758]: DEBUG nova.network.neutron [-] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 701.809511] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 701.809511] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 701.809809] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 701.816261] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.816477] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.971146] env[65758]: DEBUG nova.network.neutron [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Successfully created port: 09744327-3d1a-4d1b-8f3c-8532ca1fef21 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 702.148122] env[65758]: DEBUG nova.compute.manager [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 702.148199] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.149568] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5459e0-36a9-47d0-9a1a-9f17fddafa12 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.161850] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 702.161850] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-323d3d79-3cc3-4a51-8f53-b859b7885b14 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.176735] env[65758]: DEBUG oslo_vmware.api [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 702.176735] env[65758]: value = "task-4660138" [ 702.176735] env[65758]: _type = "Task" [ 702.176735] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.182536] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896df660-4274-4688-9d72-7f84fb7d0053 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.194479] env[65758]: DEBUG oslo_vmware.api [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660138, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.197386] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dacc57b-4909-4220-b34c-0ac5ad2f1ace {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.231620] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f41dd48-c0dd-4adb-818d-7b35486c4acf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.240716] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9a1320-8035-45ce-9059-b4f489943457 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.256826] env[65758]: DEBUG nova.compute.provider_tree [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.288500] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660136, 'name': ReconfigVM_Task, 'duration_secs': 0.524931} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.288877] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 702.289593] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86bd7a65-2a70-4263-aad7-90253cd13267 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.297941] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 702.297941] env[65758]: value = "task-4660139" [ 702.297941] env[65758]: _type = "Task" [ 702.297941] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.307601] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660139, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.309048] env[65758]: DEBUG nova.objects.instance [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Lazy-loading 'flavor' on Instance uuid e60efbcd-1c4e-40a1-8bc1-893daa511073 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 702.519150] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 702.585040] env[65758]: DEBUG nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 702.626403] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 702.626687] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 702.626838] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 702.627034] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 702.627173] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 702.627307] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d 
tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 702.627556] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 702.627717] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 702.627878] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 702.628048] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 702.628336] env[65758]: DEBUG nova.virt.hardware [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 702.630970] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9226875-f3dc-447d-8a53-3624f645f191 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.641180] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040d3b42-adc5-4246-a064-7d043fb535b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.689490] env[65758]: DEBUG oslo_vmware.api [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660138, 'name': PowerOffVM_Task, 'duration_secs': 0.263152} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.689931] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 702.690119] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 702.690467] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4db9f358-1c9c-4e51-a659-ce03ca41d589 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.762750] env[65758]: DEBUG nova.scheduler.client.report [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.768823] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 702.768898] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 702.769103] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleting the datastore file [datastore1] 492d1063-8eaf-4207-8d65-341fbc0b6c39 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.770107] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8189d505-4904-4fd7-819c-323641ae0cba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.778996] env[65758]: DEBUG oslo_vmware.api [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 702.778996] env[65758]: value = "task-4660141" [ 
702.778996] env[65758]: _type = "Task" [ 702.778996] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.793713] env[65758]: DEBUG oslo_vmware.api [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660141, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.810015] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660139, 'name': Rename_Task, 'duration_secs': 0.2337} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.810318] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 702.810489] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e232c4db-23c4-4f69-b5cc-249d1ae99f10 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.823569] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 702.823569] env[65758]: value = "task-4660142" [ 702.823569] env[65758]: _type = "Task" [ 702.823569] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.831866] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660142, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.271702] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.272310] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 703.275807] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.146s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.277555] env[65758]: INFO nova.compute.claims [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.292025] env[65758]: DEBUG oslo_vmware.api [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145291} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.292025] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.292025] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 703.292251] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.292293] env[65758]: INFO nova.compute.manager [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Took 1.14 seconds to destroy the instance on the hypervisor. [ 703.293329] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 703.293329] env[65758]: DEBUG nova.compute.manager [-] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 703.293329] env[65758]: DEBUG nova.network.neutron [-] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 703.293329] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 703.293605] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 703.294537] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 703.320246] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a8fe0ab-f6f4-49f0-9197-8967fb22c3bd tempest-VolumesAssistedSnapshotsTest-1397276490 tempest-VolumesAssistedSnapshotsTest-1397276490-project-admin] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.369s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.332834] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660142, 'name': PowerOnVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.783129] env[65758]: DEBUG nova.compute.utils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 703.789200] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 703.789200] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 703.789200] env[65758]: WARNING neutronclient.v2_0.client [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 703.789200] env[65758]: WARNING neutronclient.v2_0.client [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 703.790046] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 703.790426] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 703.836832] env[65758]: DEBUG oslo_vmware.api [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660142, 'name': PowerOnVM_Task, 'duration_secs': 0.672778} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.836832] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 703.836832] env[65758]: INFO nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Took 9.85 seconds to spawn the instance on the hypervisor. 
[ 703.836832] env[65758]: DEBUG nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 703.836832] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd98ffb0-1f43-4693-b8ab-8ebfdb8e30b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.126341] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 704.167768] env[65758]: DEBUG nova.policy [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68c293475b1641bda0a7f5a13f1cdbe1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4154e353eb4142178244814f4ebd6167', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 704.176515] env[65758]: DEBUG nova.network.neutron [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Successfully updated port: 09744327-3d1a-4d1b-8f3c-8532ca1fef21 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 704.286561] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 704.361042] env[65758]: INFO nova.compute.manager [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Took 29.08 seconds to build instance. 
[ 704.571451] env[65758]: DEBUG nova.network.neutron [-] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 704.576528] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Successfully created port: 47787883-24ce-41e2-9595-7d07b0d86022 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 704.681590] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.681590] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.681590] env[65758]: DEBUG nova.network.neutron [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 704.736570] env[65758]: DEBUG nova.compute.manager [req-9a5ec128-913c-410e-9dba-580bf8f59d4c req-903a6671-dd89-4798-930e-5a54932d7fc4 service nova] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Received event network-vif-deleted-13ded2e1-9fb6-4eed-b82c-82509e35b8fb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 704.863876] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce412ee3-8a85-4da2-a187-f3f19bfa097e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.590s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.946103] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d993383-240e-44ea-80c8-5508ee9fc162 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.956902] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6685db31-4f30-4fa7-aa8e-1269bf2293fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.995840] env[65758]: DEBUG nova.network.neutron [-] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 704.999011] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-34d05210-5123-482a-9c0b-6ecf1fa0a214 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.009827] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969d5457-d73a-4dec-bd6e-37c76126f314 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.025317] env[65758]: DEBUG nova.compute.provider_tree [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.082142] env[65758]: INFO nova.compute.manager [-] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Took 3.27 seconds to deallocate network for instance. [ 705.190214] env[65758]: WARNING openstack [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 705.190374] env[65758]: WARNING openstack [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 705.277895] env[65758]: DEBUG nova.network.neutron [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 705.299932] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 705.328809] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 705.329074] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 705.329230] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 705.329441] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 705.329587] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 705.329795] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 705.330087] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 705.330255] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 705.330418] env[65758]: DEBUG nova.virt.hardware [None 
req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 705.330576] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 705.330882] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 705.331841] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b071ec1-479d-4412-898a-ec24790ba365 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.341482] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24a8232-2739-4b3b-a962-781fe743c836 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.368414] env[65758]: DEBUG nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 705.498802] env[65758]: INFO nova.compute.manager [-] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Took 2.21 seconds to deallocate network for instance. 
[ 705.529352] env[65758]: DEBUG nova.scheduler.client.report [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.592425] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquiring lock "e48a075b-41b3-4612-bd5f-0a158d707a2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.593068] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "e48a075b-41b3-4612-bd5f-0a158d707a2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.593173] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquiring lock "e48a075b-41b3-4612-bd5f-0a158d707a2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.593461] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "e48a075b-41b3-4612-bd5f-0a158d707a2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.593741] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "e48a075b-41b3-4612-bd5f-0a158d707a2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.598702] env[65758]: WARNING neutronclient.v2_0.client [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 705.599459] env[65758]: WARNING openstack [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 705.599829] env[65758]: WARNING openstack [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 705.610255] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.610517] env[65758]: INFO nova.compute.manager [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Terminating instance [ 705.896916] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.898920] env[65758]: DEBUG nova.network.neutron [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Updating instance_info_cache with network_info: [{"id": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "address": "fa:16:3e:01:ce:ae", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09744327-3d", "ovs_interfaceid": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 706.007461] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.034931] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.759s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.035502] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 706.039717] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.965s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.040530] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.042191] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 15.386s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.077931] env[65758]: INFO nova.scheduler.client.report [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Deleted allocations for instance a2010738-759b-480a-8360-2639788056b1 [ 706.115441] env[65758]: DEBUG nova.compute.manager [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 706.116398] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 706.117918] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf9e77b-2bc1-4804-843c-c9ebdc56aa23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.133989] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 706.134361] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae8b29d4-4fb6-43af-96fd-badd0e1b3ae5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.142353] env[65758]: DEBUG oslo_vmware.api [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 706.142353] env[65758]: value = "task-4660144" [ 706.142353] env[65758]: _type = "Task" [ 706.142353] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.153540] env[65758]: DEBUG oslo_vmware.api [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4660144, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.224566] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquiring lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.224879] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.225107] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquiring lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.225315] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.225473] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.228911] env[65758]: INFO nova.compute.manager [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Terminating instance [ 706.286059] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Successfully updated port: 47787883-24ce-41e2-9595-7d07b0d86022 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 706.403038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.403690] env[65758]: DEBUG nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d 
tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Instance network_info: |[{"id": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "address": "fa:16:3e:01:ce:ae", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09744327-3d", "ovs_interfaceid": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 706.404473] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:ce:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd77ecbc-aaaf-45f4-ae8f-977d90e4052f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09744327-3d1a-4d1b-8f3c-8532ca1fef21', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.415225] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Creating folder: Project (693b129cd84f4eee9971e7221e92c3e0). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.415225] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac27ca9f-998e-4dd2-b7cd-1d7fb9d92e97 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.429498] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Created folder: Project (693b129cd84f4eee9971e7221e92c3e0) in parent group-v909763. [ 706.429699] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Creating folder: Instances. Parent ref: group-v909852. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.430465] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe754f00-9075-4ee3-8ade-53bf0a8b889d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.447155] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Created folder: Instances in parent group-v909852. [ 706.447155] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 706.447155] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.447155] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7765cd5-b511-4b05-a968-e3dd614f1c28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.477897] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.477897] env[65758]: value = "task-4660147" [ 706.477897] env[65758]: _type = "Task" [ 706.477897] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.490364] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660147, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.515850] env[65758]: DEBUG nova.compute.manager [req-f8116dd4-0e63-41ff-b5c9-08030fe1da78 req-c4037b2f-5251-4e29-b16f-c5a337a4b8f6 service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Received event network-vif-plugged-09744327-3d1a-4d1b-8f3c-8532ca1fef21 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 706.516518] env[65758]: DEBUG oslo_concurrency.lockutils [req-f8116dd4-0e63-41ff-b5c9-08030fe1da78 req-c4037b2f-5251-4e29-b16f-c5a337a4b8f6 service nova] Acquiring lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.518301] env[65758]: DEBUG oslo_concurrency.lockutils [req-f8116dd4-0e63-41ff-b5c9-08030fe1da78 req-c4037b2f-5251-4e29-b16f-c5a337a4b8f6 service nova] Lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.518577] env[65758]: DEBUG oslo_concurrency.lockutils [req-f8116dd4-0e63-41ff-b5c9-08030fe1da78 req-c4037b2f-5251-4e29-b16f-c5a337a4b8f6 service nova] Lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.519561] env[65758]: DEBUG nova.compute.manager [req-f8116dd4-0e63-41ff-b5c9-08030fe1da78 req-c4037b2f-5251-4e29-b16f-c5a337a4b8f6 service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] No waiting events found dispatching network-vif-plugged-09744327-3d1a-4d1b-8f3c-8532ca1fef21 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 706.519561] env[65758]: WARNING nova.compute.manager [req-f8116dd4-0e63-41ff-b5c9-08030fe1da78 req-c4037b2f-5251-4e29-b16f-c5a337a4b8f6 service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Received unexpected event network-vif-plugged-09744327-3d1a-4d1b-8f3c-8532ca1fef21 for instance with vm_state building and task_state spawning. [ 706.545953] env[65758]: DEBUG nova.compute.utils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 706.556564] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 706.556798] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 706.557398] env[65758]: WARNING neutronclient.v2_0.client [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 706.557761] env[65758]: WARNING neutronclient.v2_0.client [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 706.558373] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 706.560831] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 706.593842] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8dac48f6-b505-4eb8-ac49-e9db0e95df99 tempest-InstanceActionsV221TestJSON-671489737 tempest-InstanceActionsV221TestJSON-671489737-project-member] Lock "a2010738-759b-480a-8360-2639788056b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.281s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.655323] env[65758]: DEBUG oslo_vmware.api [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4660144, 'name': PowerOffVM_Task, 'duration_secs': 0.272458} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.659230] env[65758]: DEBUG nova.policy [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68c293475b1641bda0a7f5a13f1cdbe1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4154e353eb4142178244814f4ebd6167', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 706.664737] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 706.665116] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 706.666310] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f57ab61-6700-4a59-8c8b-166ac968eb8c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.739283] env[65758]: DEBUG nova.compute.manager [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 706.739552] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 706.740783] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45ac0f8-36e9-4a8b-93c9-08458950a1da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.750418] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 706.752296] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7ffa15a-b29f-446d-b8c3-06c4d0576774 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.755508] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 706.756024] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 706.756226] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Deleting the datastore file [datastore2] e48a075b-41b3-4612-bd5f-0a158d707a2f {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 706.757027] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac0a356c-092b-4016-a775-4c680b087733 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.764062] env[65758]: DEBUG oslo_vmware.api [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 706.764062] env[65758]: value = "task-4660149" [ 706.764062] env[65758]: _type = "Task" [ 706.764062] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.767170] env[65758]: DEBUG oslo_vmware.api [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for the task: (returnval){ [ 706.767170] env[65758]: value = "task-4660150" [ 706.767170] env[65758]: _type = "Task" [ 706.767170] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.784113] env[65758]: DEBUG oslo_vmware.api [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4660150, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.792422] env[65758]: DEBUG oslo_vmware.api [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660149, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.795606] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "refresh_cache-b7692c74-c919-45b4-991b-c06a530ff9ef" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.796136] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "refresh_cache-b7692c74-c919-45b4-991b-c06a530ff9ef" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.797669] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 706.993727] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660147, 'name': CreateVM_Task, 'duration_secs': 0.415594} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.993993] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.994695] env[65758]: WARNING neutronclient.v2_0.client [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 706.995134] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.995289] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.995614] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.995992] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f427c6c-ce3b-4327-9786-3a8306c303f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.006060] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 707.006060] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ba8826-28d1-d44d-78a6-9c06620f57f5" [ 707.006060] env[65758]: _type = "Task" [ 707.006060] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.026781] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ba8826-28d1-d44d-78a6-9c06620f57f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.057087] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 707.156257] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Successfully created port: bff20363-4d58-4158-b43d-6fd204aae97d {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 707.238398] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.238690] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.279898] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquiring lock "fb379346-f17a-4433-bb55-2b72025e9a61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.279898] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "fb379346-f17a-4433-bb55-2b72025e9a61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.297519] env[65758]: DEBUG oslo_vmware.api [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Task: {'id': task-4660150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299583} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.298050] env[65758]: DEBUG oslo_vmware.api [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660149, 'name': PowerOffVM_Task, 'duration_secs': 0.2945} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.298316] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 707.298511] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 707.298629] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 707.298790] env[65758]: INFO nova.compute.manager [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Took 1.18 seconds to destroy the instance on the hypervisor. [ 707.299070] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 707.299285] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 707.299444] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 707.299688] env[65758]: DEBUG nova.compute.manager [-] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 707.299785] env[65758]: DEBUG nova.network.neutron [-] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 707.300044] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 707.300600] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 707.300857] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 707.308229] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7e86130-c9b7-48fa-816b-74282b2a1559 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.319025] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 707.319025] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 707.367107] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3d55cc-989f-4e49-acc8-5f22db903af1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.377599] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61f84cb-f8ff-4208-bed7-8ca1ffa95f2e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.415839] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96aa5d8a-577d-41aa-a810-3fa3bcc14725 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.424861] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e40da53-f528-46fe-aa6e-c7dae9947a89 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.440510] env[65758]: DEBUG nova.compute.provider_tree [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.487822] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: 
b6b673e9-0ae1-4c7c-be53-e83641063cf8] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 707.487822] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 707.488078] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Deleting the datastore file [datastore1] b6b673e9-0ae1-4c7c-be53-e83641063cf8 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 707.488246] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f091fc2d-2236-4943-9d07-eb7b9f3c9f3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.496512] env[65758]: DEBUG oslo_vmware.api [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for the task: (returnval){ [ 707.496512] env[65758]: value = "task-4660152" [ 707.496512] env[65758]: _type = "Task" [ 707.496512] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.508607] env[65758]: DEBUG oslo_vmware.api [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660152, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.519582] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ba8826-28d1-d44d-78a6-9c06620f57f5, 'name': SearchDatastore_Task, 'duration_secs': 0.028046} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.519959] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.520234] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.520480] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.520665] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.520825] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.521088] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17fcfd18-e6f8-456b-96b4-1c23f74e8d09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.525311] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 707.530893] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.530893] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.531588] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12c17385-2c9a-4611-9405-3a789c23a974 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.537768] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 707.537768] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae2a7e-7c9a-329d-a93e-8732433d8382" [ 707.537768] env[65758]: _type = "Task" [ 707.537768] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.547152] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae2a7e-7c9a-329d-a93e-8732433d8382, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.554894] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 707.944251] env[65758]: DEBUG nova.scheduler.client.report [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 708.007413] env[65758]: DEBUG oslo_vmware.api [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Task: {'id': task-4660152, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264975} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.007738] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 708.008039] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 708.008314] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 708.008593] env[65758]: INFO nova.compute.manager [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Took 1.27 seconds to destroy the instance on the hypervisor. [ 708.008941] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 708.009287] env[65758]: DEBUG nova.compute.manager [-] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 708.009447] env[65758]: DEBUG nova.network.neutron [-] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 708.009821] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 708.010664] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 708.011175] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 708.049038] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae2a7e-7c9a-329d-a93e-8732433d8382, 'name': SearchDatastore_Task, 'duration_secs': 0.030072} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.049854] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6368c75d-6bd4-40c9-9e25-e4fd78cbba03 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.055687] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 708.055687] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a995d9-1545-6e99-180b-f71774c642c6" [ 708.055687] env[65758]: _type = "Task" [ 708.055687] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.063749] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a995d9-1545-6e99-180b-f71774c642c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.069964] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 708.094498] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 708.098387] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 708.099054] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 708.099054] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 708.099329] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 708.099537] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 708.099731] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 708.099969] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.100192] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 708.100575] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 
tempest-MultipleCreateTestJSON-325801015-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 708.100655] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 708.100822] env[65758]: DEBUG nova.virt.hardware [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 708.102079] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1f3ee0-cf9e-46c5-81fa-2074b033f8ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.111087] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9157fe0-aaaf-4ce2-aa24-5fdaa2058dc8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.139868] env[65758]: WARNING neutronclient.v2_0.client [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 708.140585] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 708.140880] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 708.413573] env[65758]: DEBUG nova.network.neutron [-] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 708.457393] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Updating instance_info_cache with network_info: [{"id": "47787883-24ce-41e2-9595-7d07b0d86022", "address": "fa:16:3e:e0:94:0f", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47787883-24", "ovs_interfaceid": "47787883-24ce-41e2-9595-7d07b0d86022", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 708.567494] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a995d9-1545-6e99-180b-f71774c642c6, 'name': SearchDatastore_Task, 'duration_secs': 0.033477} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.567777] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.568045] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba/ec1e2845-e73a-40ff-9b6c-1d8281859fba.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.568319] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79edc830-e88b-4c57-b66d-c7e896551648 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.576819] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 708.576819] env[65758]: value = "task-4660153" [ 708.576819] env[65758]: _type = "Task" [ 708.576819] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.585482] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660153, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.842555] env[65758]: DEBUG nova.network.neutron [-] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 708.904581] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Successfully updated port: bff20363-4d58-4158-b43d-6fd204aae97d {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 708.910073] env[65758]: DEBUG nova.compute.manager [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Received event network-vif-deleted-9f2a1cde-126e-4502-835b-0bb1647edbfe {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 708.910073] env[65758]: DEBUG nova.compute.manager [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Received event network-vif-deleted-67c82890-e746-45ff-9f1b-e905834b0064 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 708.910073] env[65758]: DEBUG nova.compute.manager [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Received event network-vif-plugged-47787883-24ce-41e2-9595-7d07b0d86022 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 708.910073] env[65758]: DEBUG oslo_concurrency.lockutils [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Acquiring lock "b7692c74-c919-45b4-991b-c06a530ff9ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.910446] env[65758]: DEBUG oslo_concurrency.lockutils [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Lock "b7692c74-c919-45b4-991b-c06a530ff9ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.910619] env[65758]: DEBUG oslo_concurrency.lockutils [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Lock "b7692c74-c919-45b4-991b-c06a530ff9ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.910803] env[65758]: DEBUG nova.compute.manager [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] No waiting events found dispatching network-vif-plugged-47787883-24ce-41e2-9595-7d07b0d86022 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 708.911950] env[65758]: WARNING nova.compute.manager [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] 
[instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Received unexpected event network-vif-plugged-47787883-24ce-41e2-9595-7d07b0d86022 for instance with vm_state building and task_state spawning. [ 708.911950] env[65758]: DEBUG nova.compute.manager [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Received event network-changed-47787883-24ce-41e2-9595-7d07b0d86022 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 708.911950] env[65758]: DEBUG nova.compute.manager [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Refreshing instance network info cache due to event network-changed-47787883-24ce-41e2-9595-7d07b0d86022. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 708.911950] env[65758]: DEBUG oslo_concurrency.lockutils [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Acquiring lock "refresh_cache-b7692c74-c919-45b4-991b-c06a530ff9ef" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.915973] env[65758]: INFO nova.compute.manager [-] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Took 1.62 seconds to deallocate network for instance. [ 708.957529] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.914s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.963291] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.725s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.964871] env[65758]: INFO nova.compute.claims [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.968372] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "refresh_cache-b7692c74-c919-45b4-991b-c06a530ff9ef" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.968886] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Instance network_info: |[{"id": "47787883-24ce-41e2-9595-7d07b0d86022", "address": "fa:16:3e:e0:94:0f", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47787883-24", "ovs_interfaceid": "47787883-24ce-41e2-9595-7d07b0d86022", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 708.969636] env[65758]: DEBUG oslo_concurrency.lockutils [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Acquired lock "refresh_cache-b7692c74-c919-45b4-991b-c06a530ff9ef" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.970275] env[65758]: DEBUG nova.network.neutron [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Refreshing network info cache for port 47787883-24ce-41e2-9595-7d07b0d86022 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 708.971387] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:94:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '085fb0ff-9285-4f1d-a008-a14da4844357', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47787883-24ce-41e2-9595-7d07b0d86022', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.979537] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Creating folder: Project (4154e353eb4142178244814f4ebd6167). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 708.982353] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf563a47-d838-4185-9b02-6b51eea3b260 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.999308] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Created folder: Project (4154e353eb4142178244814f4ebd6167) in parent group-v909763. [ 708.999572] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Creating folder: Instances. Parent ref: group-v909855. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.000581] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c76d2d9-ebc9-4e33-9e4a-c2769e2e3e0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.019059] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Created folder: Instances in parent group-v909855. [ 709.021023] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 709.021023] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 709.022129] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9168cea-88c7-44db-a8f8-4042d933dc24 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.048875] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 709.048875] env[65758]: value = "task-4660156" [ 709.048875] env[65758]: _type = "Task" [ 709.048875] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.062453] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660156, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.091701] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660153, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.346392] env[65758]: INFO nova.compute.manager [-] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Took 1.34 seconds to deallocate network for instance. 
[ 709.413663] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "refresh_cache-de8f3600-b25f-4396-af37-ea703587979c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.414106] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "refresh_cache-de8f3600-b25f-4396-af37-ea703587979c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.414440] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 709.425425] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.481439] env[65758]: WARNING neutronclient.v2_0.client [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 709.482404] env[65758]: WARNING openstack [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 709.482838] env[65758]: WARNING openstack [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 709.563277] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660156, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.571545] env[65758]: INFO nova.scheduler.client.report [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleted allocation for migration 28721f73-0009-4427-b697-d46294cf6cb7 [ 709.587442] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660153, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566274} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.587632] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba/ec1e2845-e73a-40ff-9b6c-1d8281859fba.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.587847] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.588724] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e0b27c2-93e5-4884-862d-9838553ba4ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.597170] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 709.597170] env[65758]: value = "task-4660157" [ 709.597170] env[65758]: _type = "Task" [ 709.597170] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.610706] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660157, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.681705] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "a662eac8-07e2-47f1-a4dd-9abbe824817d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.682843] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "a662eac8-07e2-47f1-a4dd-9abbe824817d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.797799] env[65758]: WARNING neutronclient.v2_0.client [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 709.798480] env[65758]: WARNING openstack [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 709.798981] env[65758]: WARNING openstack [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 709.819617] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquiring lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.819757] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.820170] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquiring lock "e60efbcd-1c4e-40a1-8bc1-893daa511073-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.820170] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.820299] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.822881] env[65758]: INFO nova.compute.manager [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Terminating instance [ 709.838960] env[65758]: DEBUG nova.compute.manager 
[req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Received event network-changed-09744327-3d1a-4d1b-8f3c-8532ca1fef21 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 709.839472] env[65758]: DEBUG nova.compute.manager [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Refreshing instance network info cache due to event network-changed-09744327-3d1a-4d1b-8f3c-8532ca1fef21. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 709.839898] env[65758]: DEBUG oslo_concurrency.lockutils [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] Acquiring lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.840095] env[65758]: DEBUG oslo_concurrency.lockutils [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] Acquired lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.840363] env[65758]: DEBUG nova.network.neutron [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Refreshing network info cache for port 09744327-3d1a-4d1b-8f3c-8532ca1fef21 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 709.857816] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.908450] env[65758]: DEBUG nova.network.neutron [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Updated VIF entry in instance network info cache for port 47787883-24ce-41e2-9595-7d07b0d86022. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 709.909179] env[65758]: DEBUG nova.network.neutron [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Updating instance_info_cache with network_info: [{"id": "47787883-24ce-41e2-9595-7d07b0d86022", "address": "fa:16:3e:e0:94:0f", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47787883-24", "ovs_interfaceid": "47787883-24ce-41e2-9595-7d07b0d86022", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 709.918088] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 709.919210] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 709.959877] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 710.059268] env[65758]: WARNING neutronclient.v2_0.client [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
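The "Updated VIF entry" and "Updating instance_info_cache with network_info" entries above log the cached VIF structure as JSON. The sketch below walks that structure with plain Python; the sample dict is trimmed from the logged record for port 47787883-24ce-41e2-9595-7d07b0d86022 and is not a complete VIF entry.

```python
# Minimal walk of the network_info list shape shown in the log above.
network_info = [{
    "id": "47787883-24ce-41e2-9595-7d07b0d86022",
    "address": "fa:16:3e:e0:94:0f",
    "type": "ovs",
    "devname": "tap47787883-24",
    "network": {
        "id": "8971142a-75c4-4a77-860e-49bd5a72cac6",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.9", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], ips)
```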
[ 710.060066] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 710.060433] env[65758]: WARNING openstack [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 710.075119] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660156, 'name': CreateVM_Task, 'duration_secs': 0.521117} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.075486] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 710.075973] env[65758]: WARNING neutronclient.v2_0.client [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 710.076339] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.076482] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.076777] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 710.077741] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4e699b-1d64-4916-92c8-7aac6f92b958 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 23.971s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.078812] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-bc217f2b-3c98-44bd-8ca1-b88a7f0ae988 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.084504] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 710.084504] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522fd04d-104b-3bb0-e6c6-0d59cd936798" [ 710.084504] env[65758]: _type = "Task" [ 710.084504] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.097099] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522fd04d-104b-3bb0-e6c6-0d59cd936798, 'name': SearchDatastore_Task} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.097408] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.097882] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 710.097954] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.098084] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.098280] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.099250] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-681ca077-1cad-4aaa-993c-9648c5c94123 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.114471] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d 
tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070585} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.114761] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.116502] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059a76cf-247d-4587-ac97-fb4628b50c9f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.119227] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.119684] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 710.120430] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee047835-d32e-4dfd-86a0-235117c25027 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.151148] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba/ec1e2845-e73a-40ff-9b6c-1d8281859fba.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.158038] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-847eb980-36b8-4a46-8050-a6e8cc08489d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.169524] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 710.169524] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c3e295-5f41-e8bf-4512-7855afb2337b" [ 710.169524] env[65758]: _type = "Task" [ 710.169524] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.179108] env[65758]: DEBUG nova.network.neutron [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Updating instance_info_cache with network_info: [{"id": "bff20363-4d58-4158-b43d-6fd204aae97d", "address": "fa:16:3e:d2:31:a7", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbff20363-4d", "ovs_interfaceid": "bff20363-4d58-4158-b43d-6fd204aae97d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 710.179108] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 710.179108] env[65758]: value = "task-4660158" [ 710.179108] env[65758]: _type = "Task" [ 710.179108] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.186180] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c3e295-5f41-e8bf-4512-7855afb2337b, 'name': SearchDatastore_Task, 'duration_secs': 0.010315} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.190246] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a29bdec4-efa8-4ae0-98a7-7fca83e3a005 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.196627] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660158, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.201037] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 710.201037] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520863da-1b85-3021-beaf-e8fdb56284a2" [ 710.201037] env[65758]: _type = "Task" [ 710.201037] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.214115] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520863da-1b85-3021-beaf-e8fdb56284a2, 'name': SearchDatastore_Task, 'duration_secs': 0.010723} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.214351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.214594] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] b7692c74-c919-45b4-991b-c06a530ff9ef/b7692c74-c919-45b4-991b-c06a530ff9ef.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 710.214860] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be68212a-6891-45e2-a739-c58d496b63c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.226077] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 710.226077] env[65758]: value = "task-4660159" [ 710.226077] env[65758]: _type = "Task" [ 710.226077] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.238039] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660159, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.332163] env[65758]: DEBUG nova.compute.manager [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 710.332436] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 710.333357] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6035a442-4384-4281-a18b-39e5b017fc43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.345428] env[65758]: WARNING neutronclient.v2_0.client [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 710.346268] env[65758]: WARNING openstack [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 710.346625] env[65758]: WARNING openstack [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 710.354252] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 710.357126] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eab957fc-6a7c-454b-a049-3e0e8a99cef6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.367025] env[65758]: DEBUG oslo_vmware.api [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 710.367025] env[65758]: value = "task-4660160" [ 710.367025] env[65758]: _type = "Task" [ 710.367025] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.376107] env[65758]: DEBUG oslo_vmware.api [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4660160, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.413107] env[65758]: DEBUG oslo_concurrency.lockutils [req-5de590b3-fb7a-402b-82de-7f5eae8f8826 req-b54ac6c8-dffe-4f10-a66b-d365d6cfb4a3 service nova] Releasing lock "refresh_cache-b7692c74-c919-45b4-991b-c06a530ff9ef" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.559198] env[65758]: WARNING neutronclient.v2_0.client [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 710.559698] env[65758]: WARNING openstack [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 710.560178] env[65758]: WARNING openstack [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 710.625500] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquiring lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.625651] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.658779] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4db828a-1275-4eae-b81b-0c430b43fb74 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.668321] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8f008a-c52e-42ae-932e-e677da91362e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.705253] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "refresh_cache-de8f3600-b25f-4396-af37-ea703587979c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.705711] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 
tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Instance network_info: |[{"id": "bff20363-4d58-4158-b43d-6fd204aae97d", "address": "fa:16:3e:d2:31:a7", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbff20363-4d", "ovs_interfaceid": "bff20363-4d58-4158-b43d-6fd204aae97d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 710.708946] env[65758]: DEBUG nova.network.neutron [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Updated VIF entry in instance network info cache for port 09744327-3d1a-4d1b-8f3c-8532ca1fef21. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 710.709446] env[65758]: DEBUG nova.network.neutron [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Updating instance_info_cache with network_info: [{"id": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "address": "fa:16:3e:01:ce:ae", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09744327-3d", "ovs_interfaceid": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 710.710864] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: 
de8f3600-b25f-4396-af37-ea703587979c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:31:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '085fb0ff-9285-4f1d-a008-a14da4844357', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bff20363-4d58-4158-b43d-6fd204aae97d', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.719202] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 710.723399] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e976290-2d91-4d17-9160-d0410ebe3208 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.727712] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de8f3600-b25f-4396-af37-ea703587979c] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 710.728121] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b25e6073-49e3-4d87-a23b-a6d32204b260 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.752377] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660158, 'name': ReconfigVM_Task, 'duration_secs': 0.340856} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.755656] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Reconfigured VM instance instance-00000021 to attach disk [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba/ec1e2845-e73a-40ff-9b6c-1d8281859fba.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.756952] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad73b4af-9602-4cca-8b65-f63da3fc1073 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.760043] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825a0531-d15d-4717-beab-70fc5764bb98 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.769301] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660159, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.769627] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.769627] env[65758]: value = "task-4660161" [ 710.769627] env[65758]: _type = "Task" [ 710.769627] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.787550] env[65758]: DEBUG nova.compute.provider_tree [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.793079] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 710.793079] env[65758]: value = "task-4660162" [ 710.793079] env[65758]: _type = "Task" [ 710.793079] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.793300] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660161, 'name': CreateVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.804577] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660162, 'name': Rename_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.877266] env[65758]: DEBUG oslo_vmware.api [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4660160, 'name': PowerOffVM_Task, 'duration_secs': 0.258831} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.877818] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 710.878329] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 710.878725] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71e2b3dc-0dee-41e5-a8c7-aac3e522cc2e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.958413] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 710.958413] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 710.958598] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Deleting the datastore file [datastore2] e60efbcd-1c4e-40a1-8bc1-893daa511073 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 710.959288] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ca4e75a-164e-41b0-a553-4e7c23fb9d9a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.967853] env[65758]: DEBUG oslo_vmware.api [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for the task: (returnval){ [ 710.967853] env[65758]: value = "task-4660164" [ 710.967853] env[65758]: _type = "Task" [ 710.967853] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.979123] env[65758]: DEBUG oslo_vmware.api [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4660164, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.184882] env[65758]: DEBUG nova.compute.manager [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] Received event network-vif-plugged-bff20363-4d58-4158-b43d-6fd204aae97d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 711.184882] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Acquiring lock "de8f3600-b25f-4396-af37-ea703587979c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.184882] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Lock "de8f3600-b25f-4396-af37-ea703587979c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.184882] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Lock "de8f3600-b25f-4396-af37-ea703587979c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.185269] env[65758]: DEBUG nova.compute.manager [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] No waiting events found dispatching network-vif-plugged-bff20363-4d58-4158-b43d-6fd204aae97d {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 711.185269] env[65758]: WARNING nova.compute.manager [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] Received unexpected event network-vif-plugged-bff20363-4d58-4158-b43d-6fd204aae97d for instance with vm_state building and task_state spawning. [ 711.185836] env[65758]: DEBUG nova.compute.manager [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] Received event network-changed-bff20363-4d58-4158-b43d-6fd204aae97d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 711.185836] env[65758]: DEBUG nova.compute.manager [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] Refreshing instance network info cache due to event network-changed-bff20363-4d58-4158-b43d-6fd204aae97d. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 711.185836] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Acquiring lock "refresh_cache-de8f3600-b25f-4396-af37-ea703587979c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.185836] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Acquired lock "refresh_cache-de8f3600-b25f-4396-af37-ea703587979c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.186139] env[65758]: DEBUG nova.network.neutron [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] Refreshing network info cache for port bff20363-4d58-4158-b43d-6fd204aae97d {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 711.227458] env[65758]: DEBUG oslo_concurrency.lockutils [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] Releasing lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.227718] env[65758]: DEBUG nova.compute.manager [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Received event network-vif-deleted-30a1632d-59ad-4b45-bb29-73404b1abc7c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 711.227897] env[65758]: DEBUG nova.compute.manager [req-4d784c3d-315b-4045-abaf-97e49de7cb25 req-372ae975-6220-4055-b615-9023db13bce5 service nova] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Received event network-vif-deleted-3e23d45e-b849-47dd-9649-500080939b87 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 711.238654] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550355} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.238897] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] b7692c74-c919-45b4-991b-c06a530ff9ef/b7692c74-c919-45b4-991b-c06a530ff9ef.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 711.239107] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.239387] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4763e5c-fc49-40f4-b02c-3093cffd2846 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.247259] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 711.247259] env[65758]: value = "task-4660165" [ 711.247259] env[65758]: _type = "Task" [ 711.247259] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.258397] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660165, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.283541] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660161, 'name': CreateVM_Task, 'duration_secs': 0.405403} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.283806] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de8f3600-b25f-4396-af37-ea703587979c] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 711.284413] env[65758]: WARNING neutronclient.v2_0.client [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
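The Acquiring/Acquired/Releasing lock entries around the devstack-image-cache_base path come from oslo.concurrency's lockutils, which Nova uses to serialize access to the cached image VMDK. A hedged sketch of that locking idiom follows; the lock name mirrors the path in the log, but the function body is a stand-in, not Nova's actual image-cache logic.

```python
# Hedged sketch of the lockutils idiom behind the Acquiring/Acquired/Releasing
# lock entries above. The body is a placeholder, not Nova's cache handling.
from oslo_concurrency import lockutils

CACHE_LOCK = ("[datastore2] devstack-image-cache_base/"
              "75a6399b-5100-4c51-b5cf-162bd505a28f")

def fetch_image_if_missing():
    # lockutils.lock() is a context manager; acquiring and releasing it emits
    # the DEBUG lines recorded in the log.
    with lockutils.lock(CACHE_LOCK):
        pass  # placeholder for checking/populating the cached VMDK

fetch_image_if_missing()
```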
[ 711.285173] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.285365] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.285696] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 711.286096] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe9780b-ccc8-49eb-a2ee-bf8320e8927a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.292319] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 711.292319] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f41a8b-6f68-78be-20d0-3e6738751211" [ 711.292319] env[65758]: _type = "Task" [ 711.292319] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.299647] env[65758]: DEBUG nova.scheduler.client.report [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 711.314461] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f41a8b-6f68-78be-20d0-3e6738751211, 'name': SearchDatastore_Task, 'duration_secs': 0.012339} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.318850] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.320076] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.320076] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.320076] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.320076] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.320538] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660162, 'name': Rename_Task, 'duration_secs': 0.164927} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.320748] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2aceb9a-9ded-4a9e-8b27-1ca15f6d7108 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.323496] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.324475] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02edbba9-d655-4414-87a6-0869c91672c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.332649] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 711.332649] env[65758]: value = "task-4660166" [ 711.332649] env[65758]: _type = "Task" [ 711.332649] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.334201] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.334416] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 711.337895] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31752e38-be59-4422-9167-062457529fa9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.346426] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660166, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.348473] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 711.348473] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5296ed39-44d0-84bf-ee2b-b1613c210cb2" [ 711.348473] env[65758]: _type = "Task" [ 711.348473] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.358231] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5296ed39-44d0-84bf-ee2b-b1613c210cb2, 'name': SearchDatastore_Task, 'duration_secs': 0.010253} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.359513] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fcf50af-a6c9-4c26-a8ea-464885c5c975 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.366549] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 711.366549] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528e2d58-e0b0-9e6e-a762-8d47472d5d38" [ 711.366549] env[65758]: _type = "Task" [ 711.366549] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.376224] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528e2d58-e0b0-9e6e-a762-8d47472d5d38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.483412] env[65758]: DEBUG oslo_vmware.api [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Task: {'id': task-4660164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228398} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.483701] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 711.483876] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 711.484055] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 711.484221] env[65758]: INFO nova.compute.manager [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Took 1.15 seconds to destroy the instance on the hypervisor. [ 711.484469] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 711.484692] env[65758]: DEBUG nova.compute.manager [-] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 711.484766] env[65758]: DEBUG nova.network.neutron [-] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 711.485044] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 711.485607] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 711.485852] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 711.659865] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 711.689466] env[65758]: WARNING neutronclient.v2_0.client [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 711.690197] env[65758]: WARNING openstack [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 711.690578] env[65758]: WARNING openstack [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 711.759613] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660165, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081374} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.759997] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 711.760895] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c806b6de-890d-4004-ab5c-36fc7d45364e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.790205] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] b7692c74-c919-45b4-991b-c06a530ff9ef/b7692c74-c919-45b4-991b-c06a530ff9ef.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.790432] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2aeb95b6-6aaf-4abe-8efa-037ed1645dad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.808970] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.846s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.810461] env[65758]: DEBUG nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 
tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 711.812995] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.886s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.813231] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.815684] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.721s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.817642] env[65758]: INFO nova.compute.claims [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 711.821822] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 711.821822] env[65758]: value = "task-4660167" [ 711.821822] env[65758]: _type = "Task" [ 711.821822] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.836770] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660167, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.851738] env[65758]: DEBUG oslo_vmware.api [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660166, 'name': PowerOnVM_Task, 'duration_secs': 0.503341} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.851738] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 711.851738] env[65758]: INFO nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Took 9.26 seconds to spawn the instance on the hypervisor. [ 711.851738] env[65758]: DEBUG nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 711.851738] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e4a818-485d-4fee-ac6a-8195633bfa14 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.857292] env[65758]: INFO nova.scheduler.client.report [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Deleted allocations for instance 24379189-b10a-4ef6-a3f6-b7bb43029dab [ 711.880510] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528e2d58-e0b0-9e6e-a762-8d47472d5d38, 'name': SearchDatastore_Task, 'duration_secs': 0.012647} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.880694] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.882335] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] de8f3600-b25f-4396-af37-ea703587979c/de8f3600-b25f-4396-af37-ea703587979c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 711.882335] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8f9a57e-486e-4410-aec8-4df05da63a1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.889450] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 711.889450] env[65758]: value = "task-4660168" [ 711.889450] env[65758]: _type = "Task" [ 711.889450] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.901177] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660168, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.165115] env[65758]: INFO nova.compute.manager [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Rebuilding instance [ 712.227149] env[65758]: DEBUG nova.compute.manager [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 712.228596] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57c820c-e9d9-494a-976b-c7bafc2b204d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.323140] env[65758]: DEBUG nova.compute.utils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 712.329158] env[65758]: DEBUG nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 712.329158] env[65758]: DEBUG nova.network.neutron [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 712.329158] env[65758]: WARNING neutronclient.v2_0.client [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 712.329645] env[65758]: WARNING neutronclient.v2_0.client [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 712.330549] env[65758]: WARNING openstack [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 712.331037] env[65758]: WARNING openstack [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 712.353065] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660167, 'name': ReconfigVM_Task, 'duration_secs': 0.538412} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.354093] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Reconfigured VM instance instance-00000022 to attach disk [datastore2] b7692c74-c919-45b4-991b-c06a530ff9ef/b7692c74-c919-45b4-991b-c06a530ff9ef.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.354956] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-895b1470-77ec-4da6-9678-e046ec053b4a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.367471] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 712.367471] env[65758]: value = "task-4660169" [ 712.367471] env[65758]: _type = "Task" [ 712.367471] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.368035] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c6546d70-5d37-4a2c-9665-fd1847a79acb tempest-ServersAdmin275Test-556681572 tempest-ServersAdmin275Test-556681572-project-member] Lock "24379189-b10a-4ef6-a3f6-b7bb43029dab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.580s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.387047] env[65758]: INFO nova.compute.manager [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Took 28.43 seconds to build instance. 
[ 712.388408] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660169, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.400772] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660168, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.574537] env[65758]: DEBUG nova.policy [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3338c19613c041abb681fa6cc661652a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e114eef3998848699a9a086fee86db29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 712.826681] env[65758]: DEBUG nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 712.884841] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660169, 'name': Rename_Task, 'duration_secs': 0.177006} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.885607] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 712.885607] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b24562c3-7eb3-4a07-928f-7db261f0970c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.889749] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e3d3654-db7f-428d-be07-0e0e81b9172d tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.954s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.904680] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 712.904680] env[65758]: value = "task-4660170" [ 712.904680] env[65758]: _type = "Task" [ 712.904680] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.917742] env[65758]: DEBUG nova.network.neutron [-] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 712.920161] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550333} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.920161] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] de8f3600-b25f-4396-af37-ea703587979c/de8f3600-b25f-4396-af37-ea703587979c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 712.920515] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.920605] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d6bbdc3-1e79-4fb6-86d2-4687b26eb105 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.926412] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660170, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.932305] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 712.932305] env[65758]: value = "task-4660171" [ 712.932305] env[65758]: _type = "Task" [ 712.932305] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.948212] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660171, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.952097] env[65758]: WARNING neutronclient.v2_0.client [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 712.952097] env[65758]: WARNING openstack [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 712.952382] env[65758]: WARNING openstack [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 713.005716] env[65758]: DEBUG nova.network.neutron [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Successfully created port: 0cad03bd-bdfb-4780-a072-70a72be1d8b2 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 713.244453] env[65758]: DEBUG nova.network.neutron [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] Updated VIF entry in instance network info cache for port bff20363-4d58-4158-b43d-6fd204aae97d. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 713.244986] env[65758]: DEBUG nova.network.neutron [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] Updating instance_info_cache with network_info: [{"id": "bff20363-4d58-4158-b43d-6fd204aae97d", "address": "fa:16:3e:d2:31:a7", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbff20363-4d", "ovs_interfaceid": "bff20363-4d58-4158-b43d-6fd204aae97d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 713.255924] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 713.256139] env[65758]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f807f45-0d65-4b27-aab0-30dcf4e8789a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.265996] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 713.265996] env[65758]: value = "task-4660172" [ 713.265996] env[65758]: _type = "Task" [ 713.265996] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.276210] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.403873] env[65758]: DEBUG nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 713.422041] env[65758]: INFO nova.compute.manager [-] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Took 1.94 seconds to deallocate network for instance. [ 713.422041] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660170, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.447983] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660171, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073318} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.448520] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.449400] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99857a40-70c0-4c72-8f1c-76eec1b93d3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.474630] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] de8f3600-b25f-4396-af37-ea703587979c/de8f3600-b25f-4396-af37-ea703587979c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.478593] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09224875-8f9c-4354-8a6e-e749c804cba7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.503618] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 713.503618] env[65758]: value = "task-4660173" [ 713.503618] env[65758]: _type = "Task" [ 713.503618] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.514531] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660173, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.515192] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884310a8-c0db-46cf-addb-e3ee40fc2ae6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.525533] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a48bfe-c708-4df7-a4df-cf88a4340228 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.561792] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1607525c-45f1-46be-84f3-c1c28cd66d2c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.572050] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245dbb89-aa11-4ba6-ab64-2cf010a2e161 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.586930] env[65758]: DEBUG nova.compute.provider_tree [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.748303] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1b46cd-88f5-4334-a23b-82b0f0ffdd5e req-ea97503d-fdc2-46b0-9cd8-fab0c38a4778 service nova] Releasing lock "refresh_cache-de8f3600-b25f-4396-af37-ea703587979c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.778702] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660172, 'name': PowerOffVM_Task, 'duration_secs': 0.233677} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.778891] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 713.779176] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.780141] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a70fcb-7611-40dd-ad65-f9fe3d5cd5d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.788403] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.788767] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-672a9546-b81f-478d-8869-8b971ff9f4fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.841237] env[65758]: DEBUG nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 713.867377] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.867606] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.867780] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleting the datastore file [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.870652] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74287998-db55-42dd-b33d-408eba44efb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.879051] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 713.879388] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 713.879575] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 713.879769] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 713.879971] env[65758]: DEBUG nova.virt.hardware [None 
req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 713.880587] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 713.880863] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 713.881060] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 713.881244] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 713.881431] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 713.881611] env[65758]: DEBUG nova.virt.hardware [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 713.882857] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8b64ec-7a33-4d2a-954f-e173e2360874 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.887442] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 713.887442] env[65758]: value = "task-4660175" [ 713.887442] env[65758]: _type = "Task" [ 713.887442] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.895467] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6a9e4a-28b7-404a-a04c-79a6c5dbd999 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.902585] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.923913] env[65758]: DEBUG nova.compute.manager [req-454fd722-cb76-4607-8d1c-533296e0800a req-d9239c93-11ca-4a23-ab39-285bb624bc9b service nova] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Received event network-vif-deleted-afb9abca-e097-4678-9ae2-5b3775cf16e9 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 713.938195] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.938674] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660170, 'name': PowerOnVM_Task, 'duration_secs': 0.915678} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.939065] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.939404] env[65758]: INFO nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Took 8.64 seconds to spawn the instance on the hypervisor. 
[ 713.939738] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 713.942078] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040d0b2f-aa96-4148-bac8-12ab97e20053 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.944806] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.016351] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660173, 'name': ReconfigVM_Task, 'duration_secs': 0.346358} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.016809] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Reconfigured VM instance instance-00000023 to attach disk [datastore2] de8f3600-b25f-4396-af37-ea703587979c/de8f3600-b25f-4396-af37-ea703587979c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 714.017878] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47a43117-83d1-4067-9a83-7660e5071c67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.028501] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 714.028501] env[65758]: value = "task-4660176" [ 714.028501] env[65758]: _type = "Task" [ 714.028501] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.039152] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660176, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.091884] env[65758]: DEBUG nova.scheduler.client.report [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 714.398987] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168563} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.399277] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.399459] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.399645] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.463202] env[65758]: INFO nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Took 29.10 seconds to build instance. [ 714.539644] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660176, 'name': Rename_Task, 'duration_secs': 0.155062} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.539983] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.540279] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b44614d4-3a65-45c1-a3b4-4708edce5992 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.548167] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 714.548167] env[65758]: value = "task-4660177" [ 714.548167] env[65758]: _type = "Task" [ 714.548167] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.548654] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "f7a14628-cc55-41fa-ae89-3958855df8a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.548881] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "f7a14628-cc55-41fa-ae89-3958855df8a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.549084] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "f7a14628-cc55-41fa-ae89-3958855df8a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.549312] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "f7a14628-cc55-41fa-ae89-3958855df8a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.549470] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "f7a14628-cc55-41fa-ae89-3958855df8a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
714.551982] env[65758]: INFO nova.compute.manager [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Terminating instance [ 714.564324] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660177, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.581987] env[65758]: DEBUG nova.network.neutron [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Successfully updated port: 0cad03bd-bdfb-4780-a072-70a72be1d8b2 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 714.596973] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.781s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.598640] env[65758]: DEBUG nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 714.604704] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.138s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.606847] env[65758]: INFO nova.compute.claims [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.965817] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7692c74-c919-45b4-991b-c06a530ff9ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.615s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.062533] env[65758]: DEBUG nova.compute.manager [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 715.062846] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 715.063111] env[65758]: DEBUG oslo_vmware.api [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660177, 'name': PowerOnVM_Task, 'duration_secs': 0.503948} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.063861] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8731a5a-5d7c-4800-bf35-88d90a329c58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.066947] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 715.067190] env[65758]: INFO nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Took 7.00 seconds to spawn the instance on the hypervisor. [ 715.067359] env[65758]: DEBUG nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 715.068161] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc624b2b-ba3e-4764-91ec-9be847251c59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.082151] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 715.083855] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d5403b8-6779-47fc-8cff-8f33bf76406d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.084710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "refresh_cache-4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.086381] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 
tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "refresh_cache-4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.086381] env[65758]: DEBUG nova.network.neutron [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 715.114803] env[65758]: DEBUG nova.compute.utils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 715.119728] env[65758]: DEBUG nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 715.119728] env[65758]: DEBUG nova.network.neutron [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 715.119728] env[65758]: WARNING neutronclient.v2_0.client [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 715.119991] env[65758]: WARNING neutronclient.v2_0.client [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
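Editor's note: the terminate_instance entries just above show oslo.concurrency's lock trace pattern: "Acquiring lock … by …", then "acquired … :: waited Ns", then "released … :: held Ns", first on the instance UUID and then on the short-lived "<uuid>-events" lock. Below is a minimal stdlib-only sketch of a context manager that emits the same three-line trace; it illustrates the logging pattern only and is not oslo.concurrency's actual lockutils code.

```python
# Illustrative only: a stdlib stand-in for the "Acquiring lock" /
# "acquired ... waited Ns" / "released ... held Ns" pattern in the
# lockutils entries above. NOT oslo.concurrency's implementation.
import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_locks = {}                      # name -> threading.Lock
_locks_guard = threading.Lock()  # protects the registry itself


@contextmanager
def traced_lock(name, caller):
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, waited)
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs', name, caller, held)


# Example mirroring the per-instance locking order in the trace above:
# an outer instance lock, then a short-lived "<uuid>-events" lock.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    uuid = "f7a14628-cc55-41fa-ae89-3958855df8a7"
    with traced_lock(uuid, "do_terminate_instance"):
        with traced_lock(uuid + "-events", "_clear_events"):
            pass
```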
[ 715.124457] env[65758]: WARNING openstack [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 715.125042] env[65758]: WARNING openstack [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 715.190547] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 715.190547] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 715.190547] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Deleting the datastore file [datastore1] f7a14628-cc55-41fa-ae89-3958855df8a7 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 715.191335] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de803baf-fbdb-4963-8ab4-0becaf7f4121 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.201480] env[65758]: DEBUG oslo_vmware.api [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 715.201480] env[65758]: value = "task-4660179" [ 715.201480] env[65758]: _type = "Task" [ 715.201480] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.213428] env[65758]: DEBUG oslo_vmware.api [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4660179, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.240471] env[65758]: DEBUG nova.policy [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '638c09153b9e4ec09f9fb5f87ba1e0ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '693b129cd84f4eee9971e7221e92c3e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 715.442632] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 715.443048] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 715.443325] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 715.443619] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 715.443934] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 715.444155] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 
715.444441] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.444617] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 715.444966] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 715.444966] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 715.445214] env[65758]: DEBUG nova.virt.hardware [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 715.446299] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbc0ab1-417f-4887-8503-49c621116980 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.456673] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b717ac-2dcc-470b-ab7d-eb2c0d277da2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.472347] env[65758]: DEBUG nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 715.478396] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:7a:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4a1d8f6-c296-4e9a-9582-489f0ebc77a6', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.484147] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 715.484896] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 715.485959] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fdaa675-a67e-4f0f-b42d-43a8e50177ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.519692] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.519692] env[65758]: value = "task-4660180" [ 715.519692] env[65758]: _type = "Task" [ 715.519692] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.528654] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660180, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.588706] env[65758]: WARNING openstack [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 715.589502] env[65758]: WARNING openstack [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 715.605800] env[65758]: INFO nova.compute.manager [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Took 29.51 seconds to build instance. 
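Editor's note: task-4660177 (PowerOnVM_Task) and task-4660180 (CreateVM_Task) above follow the same lifecycle: the API returns a task handle, the driver logs "Waiting for the task … to complete", polls progress ("progress is 0%" … "99%"), and finally records "completed successfully" with a duration_secs value. A rough sketch of that poll-until-terminal loop follows, assuming a hypothetical fetch_task_info callable that returns {'state': …, 'progress': …}; it mirrors the pattern visible in the trace, not oslo.vmware's wait_for_task implementation.

```python
# Illustrative sketch of the poll-until-done pattern behind the
# "Waiting for the task ... to complete" / "progress is N%" /
# "completed successfully" entries above. fetch_task_info is a
# hypothetical callable; this is not oslo.vmware's code.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(task_id, fetch_task_info, interval=0.5, timeout=300):
    """Poll fetch_task_info(task_id) until it reports success or error.

    fetch_task_info is assumed to return a dict such as
    {'state': 'running', 'progress': 40} or {'state': 'success'}.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        state = info.get("state")
        if state == "success":
            return time.monotonic() - start          # duration_secs
        if state == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"task {task_id} did not finish in {timeout}s")
        print(f"Task: {{'id': {task_id!r}}} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)


# Example with a fake task that finishes on the third poll.
if __name__ == "__main__":
    polls = iter([{"state": "running", "progress": 0},
                  {"state": "running", "progress": 99},
                  {"state": "success"}])
    duration = wait_for_task("task-4660177", lambda _id: next(polls), interval=0)
    print("duration_secs:", round(duration, 3))
```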
[ 715.620273] env[65758]: DEBUG nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 715.628754] env[65758]: DEBUG nova.network.neutron [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Successfully created port: bc11b657-640b-458c-9870-62fd7fdbe88a {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 715.694026] env[65758]: DEBUG nova.network.neutron [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 715.715037] env[65758]: DEBUG oslo_vmware.api [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4660179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.486728} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.715037] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 715.715037] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 715.715037] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.715235] env[65758]: INFO nova.compute.manager [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Took 0.65 seconds to destroy the instance on the hypervisor. [ 715.715408] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 715.715895] env[65758]: DEBUG nova.compute.manager [-] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 715.716069] env[65758]: DEBUG nova.network.neutron [-] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 715.716364] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 715.717255] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 715.717255] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 716.002197] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.037383] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660180, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.089457] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
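Editor's note: the nova.virt.hardware entries above (Flavor limits 0:0:0, maxima defaulting to 65536, "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") trace the CPU-topology enumeration for the 1-vCPU m1.nano flavor. Below is a simplified sketch of that enumeration, assuming the only constraint is that sockets × cores × threads must equal the vCPU count within the given maxima; Nova's real code applies additional preferences and sorting, but for 1 vCPU both reduce to VirtCPUTopology(1,1,1) as logged.

```python
# Simplified sketch of the topology enumeration traced above for the
# 1-vCPU m1.nano flavor: with no flavor/image limits the maxima default
# to 65536 and the only factorisation of 1 vCPU is sockets=1, cores=1,
# threads=1. Mirrors the idea, not nova.virt.hardware's exact code.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield VirtCPUTopology(sockets, cores, threads)


if __name__ == "__main__":
    print(list(possible_topologies(1)))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the log
    print(list(possible_topologies(4)))
    # -> (1,1,4), (1,2,2), (1,4,1), (2,1,2), (2,2,1), (4,1,1)
```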
[ 716.109776] env[65758]: DEBUG oslo_concurrency.lockutils [None req-adb2aa75-f56c-4ca3-beb4-c2ef90c3e39c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "de8f3600-b25f-4396-af37-ea703587979c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.713s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.212422] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e921f8-52d4-45d9-981d-1f8e11b2e317 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.221456] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f9d12a-6934-46e3-8d7c-53fbd40d989a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.253436] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc5cb79-95d4-444b-8dea-8e05a482dcdd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.263302] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be815794-ec7d-4060-bd70-5d2639be2f7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.279239] env[65758]: DEBUG nova.compute.provider_tree [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.515584] env[65758]: WARNING neutronclient.v2_0.client [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 716.516399] env[65758]: WARNING openstack [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 716.516687] env[65758]: WARNING openstack [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 716.546945] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660180, 'name': CreateVM_Task, 'duration_secs': 0.523905} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.547110] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 716.547710] env[65758]: WARNING neutronclient.v2_0.client [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 716.548268] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.548421] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.552021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 716.552021] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae3b4be8-f828-4c69-8f96-68b24abcf830 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.556356] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 716.556356] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f59ac7-311b-3ed7-e9f5-5a313c0d2b9c" [ 716.556356] env[65758]: _type = "Task" [ 716.556356] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.568804] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f59ac7-311b-3ed7-e9f5-5a313c0d2b9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.612058] env[65758]: DEBUG nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 716.640209] env[65758]: DEBUG nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 716.678547] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 716.678877] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 716.678999] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 716.679503] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 716.679503] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 716.679744] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 716.680044] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 716.680363] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 716.680450] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 716.680612] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 716.680814] env[65758]: DEBUG nova.virt.hardware [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 716.682251] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d195e2-ffb3-4d73-92a9-447dfad89c56 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.693456] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daea73f1-cfef-4048-9e04-589af50990d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.782680] env[65758]: DEBUG nova.scheduler.client.report [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 716.975574] env[65758]: DEBUG nova.network.neutron [-] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 717.068706] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f59ac7-311b-3ed7-e9f5-5a313c0d2b9c, 'name': SearchDatastore_Task, 'duration_secs': 0.012463} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.069061] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.069271] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.069587] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.069758] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.069938] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 717.070237] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bc93027-f42b-4f25-8ada-53f7ff281935 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.082467] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 717.082675] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 717.083452] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2451a56-5608-440f-81d1-74d1b1126a9a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.090276] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 717.090276] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52728b1c-3a05-e497-aacb-2aaaf9e2332a" [ 717.090276] env[65758]: _type = "Task" [ 717.090276] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.099627] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52728b1c-3a05-e497-aacb-2aaaf9e2332a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.139787] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.289281] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.290671] env[65758]: DEBUG nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 717.306937] env[65758]: DEBUG nova.network.neutron [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Successfully updated port: bc11b657-640b-458c-9870-62fd7fdbe88a {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 717.308872] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.799s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.309292] env[65758]: DEBUG nova.objects.instance [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lazy-loading 'resources' on Instance uuid 483765b5-c63c-4aac-9082-519bbc4e6eb5 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 717.370508] env[65758]: DEBUG nova.network.neutron [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Updating instance_info_cache with network_info: [{"id": "0cad03bd-bdfb-4780-a072-70a72be1d8b2", "address": "fa:16:3e:6f:bb:07", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cad03bd-bd", "ovs_interfaceid": "0cad03bd-bdfb-4780-a072-70a72be1d8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 717.479086] env[65758]: INFO nova.compute.manager [-] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Took 1.76 seconds to deallocate network for instance. [ 717.608509] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52728b1c-3a05-e497-aacb-2aaaf9e2332a, 'name': SearchDatastore_Task, 'duration_secs': 0.035844} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.608509] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-534749f4-6941-4b1b-a91f-ed7952f6e9fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.620873] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 717.620873] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525d1194-381e-275c-6142-4321a806b06c" [ 717.620873] env[65758]: _type = "Task" [ 717.620873] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.633164] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525d1194-381e-275c-6142-4321a806b06c, 'name': SearchDatastore_Task, 'duration_secs': 0.012328} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.633164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.633736] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 717.633736] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ab1b6be-3ab4-4f78-be7d-9cd19891a98c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.642308] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 717.642308] env[65758]: value = "task-4660181" [ 717.642308] env[65758]: _type = "Task" [ 717.642308] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.651875] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660181, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.699368] env[65758]: DEBUG nova.compute.manager [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Received event network-vif-plugged-0cad03bd-bdfb-4780-a072-70a72be1d8b2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 717.699483] env[65758]: DEBUG oslo_concurrency.lockutils [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Acquiring lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.699708] env[65758]: DEBUG oslo_concurrency.lockutils [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.700028] env[65758]: DEBUG oslo_concurrency.lockutils [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.700362] env[65758]: DEBUG nova.compute.manager [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] No waiting events found dispatching network-vif-plugged-0cad03bd-bdfb-4780-a072-70a72be1d8b2 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 717.700425] env[65758]: WARNING nova.compute.manager [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Received unexpected event network-vif-plugged-0cad03bd-bdfb-4780-a072-70a72be1d8b2 for instance with vm_state building and task_state spawning. [ 717.700620] env[65758]: DEBUG nova.compute.manager [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Received event network-changed-0cad03bd-bdfb-4780-a072-70a72be1d8b2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 717.700765] env[65758]: DEBUG nova.compute.manager [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Refreshing instance network info cache due to event network-changed-0cad03bd-bdfb-4780-a072-70a72be1d8b2. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 717.701057] env[65758]: DEBUG oslo_concurrency.lockutils [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Acquiring lock "refresh_cache-4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.812682] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.812879] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.813079] env[65758]: DEBUG nova.network.neutron [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 717.817108] env[65758]: DEBUG nova.compute.utils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 717.823087] env[65758]: DEBUG nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 717.823902] env[65758]: DEBUG nova.network.neutron [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 717.823902] env[65758]: WARNING neutronclient.v2_0.client [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 717.824098] env[65758]: WARNING neutronclient.v2_0.client [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
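Editor's note: the instance_info_cache entry above for 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1 (and the copy repeated just below) is a JSON list of VIF dicts with nested network → subnets → ips data. The small helper below is an illustrative way to pull the port id, MAC, and fixed IPs out of a blob shaped like that entry; the field names come from the trace, while the helper itself is hypothetical.

```python
# Small helper sketch for reading the network_info blobs logged above
# (a list of VIF dicts with nested "network" -> "subnets" -> "ips").
# Field names are taken from the cache entry shown in this trace.
import json

sample = json.loads("""
[{"id": "0cad03bd-bdfb-4780-a072-70a72be1d8b2",
  "address": "fa:16:3e:6f:bb:07",
  "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.4", "type": "fixed"}]}]},
  "type": "ovs", "devname": "tap0cad03bd-bd", "active": true}]
""")


def fixed_ips(network_info):
    """Return (vif_id, mac, [fixed addresses]) for each VIF in a network_info list."""
    out = []
    for vif in network_info:
        addrs = [ip["address"]
                 for subnet in vif.get("network", {}).get("subnets", [])
                 for ip in subnet.get("ips", [])
                 if ip.get("type") == "fixed"]
        out.append((vif["id"], vif["address"], addrs))
    return out


if __name__ == "__main__":
    print(fixed_ips(sample))
    # -> [('0cad03bd-bdfb-4780-a072-70a72be1d8b2', 'fa:16:3e:6f:bb:07', ['192.168.128.4'])]
```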
[ 717.824871] env[65758]: WARNING openstack [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 717.829360] env[65758]: WARNING openstack [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 717.873896] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "refresh_cache-4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.874324] env[65758]: DEBUG nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Instance network_info: |[{"id": "0cad03bd-bdfb-4780-a072-70a72be1d8b2", "address": "fa:16:3e:6f:bb:07", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cad03bd-bd", "ovs_interfaceid": "0cad03bd-bdfb-4780-a072-70a72be1d8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 717.874673] env[65758]: DEBUG oslo_concurrency.lockutils [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Acquired lock "refresh_cache-4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.874844] env[65758]: DEBUG nova.network.neutron [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Refreshing network info cache for port 0cad03bd-bdfb-4780-a072-70a72be1d8b2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 717.876442] 
env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:bb:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0cad03bd-bdfb-4780-a072-70a72be1d8b2', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.887522] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 717.894673] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 717.895518] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d55e595a-ad16-4936-9cbf-6487f632cbfe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.926298] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.926298] env[65758]: value = "task-4660182" [ 717.926298] env[65758]: _type = "Task" [ 717.926298] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.939551] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660182, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.949350] env[65758]: DEBUG nova.policy [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b15f650508f844388197b63e6fee78a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4c2ab2b80c04c38bfb4c7cafac87fe6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 717.988229] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.157579] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660181, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.323928] env[65758]: WARNING openstack [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.324285] env[65758]: WARNING openstack [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.332727] env[65758]: DEBUG nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 718.391368] env[65758]: WARNING neutronclient.v2_0.client [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 718.393859] env[65758]: WARNING openstack [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.393859] env[65758]: WARNING openstack [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.427505] env[65758]: DEBUG nova.network.neutron [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 718.442040] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660182, 'name': CreateVM_Task, 'duration_secs': 0.516273} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.444882] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.446127] env[65758]: WARNING neutronclient.v2_0.client [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 718.446127] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.446332] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.446504] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 718.446773] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de2791f0-3eca-4f7c-976b-dbdb8acd78d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.455135] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 718.455135] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52794d6c-234a-5d24-062f-48fb666315ed" [ 718.455135] env[65758]: _type = "Task" [ 718.455135] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.470545] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52794d6c-234a-5d24-062f-48fb666315ed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.478816] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "5e54e7f4-3df1-4283-bee1-a7e475051a24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.479073] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "5e54e7f4-3df1-4283-bee1-a7e475051a24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.489087] env[65758]: DEBUG nova.network.neutron [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Successfully created port: b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 718.506426] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df9c1c5-f610-49dc-847c-d22536e1fed1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.517260] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1cd150-54ab-422b-9035-697d806caad8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.556572] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66690fd-f1d9-4775-8fd2-e609055d244d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.566957] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc285163-5ec8-4080-8e55-1b8c0a32aaa5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.583856] env[65758]: DEBUG nova.compute.provider_tree [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.656105] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570696} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.656501] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 718.657441] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 718.657441] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d40e6a53-6c11-4d03-bfda-92a2866c4ad8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.666492] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 718.666492] env[65758]: value = "task-4660183" [ 718.666492] env[65758]: _type = "Task" [ 718.666492] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.678726] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660183, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.689979] env[65758]: WARNING neutronclient.v2_0.client [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
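The "Waiting for the task", "progress is N%", and "completed successfully" lines are produced by oslo.vmware's wait_for_task() polling loop (api.py:397/434/444). A rough sketch of that call pattern using only the public oslo.vmware API; host and credentials are placeholders, dc_ref is a hypothetical datacenter reference (a real caller such as Nova passes the actual datacenter moref), and the disk path and size are the ones shown in the ExtendVirtualDisk_Task entries above:

from oslo_vmware import api as vmware_api

# Placeholder connection values, not this deployment's.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Any VIM *_Task call returns a task reference; wait_for_task() polls it,
# logging the "progress is N%" lines and returning the task info on success.
dc_ref = None  # hypothetical placeholder for the datacenter moref
task_ref = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    name='[datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/'
         '8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk',
    datacenter=dc_ref,
    newCapacityKb=1048576,
    eagerZero=False)
task_info = session.wait_for_task(task_ref)
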
[ 718.690660] env[65758]: WARNING openstack [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.690994] env[65758]: WARNING openstack [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.810706] env[65758]: WARNING neutronclient.v2_0.client [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 718.811332] env[65758]: WARNING openstack [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 718.811958] env[65758]: WARNING openstack [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 718.872053] env[65758]: DEBUG nova.network.neutron [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Updating instance_info_cache with network_info: [{"id": "bc11b657-640b-458c-9870-62fd7fdbe88a", "address": "fa:16:3e:dc:f8:a2", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc11b657-64", "ovs_interfaceid": "bc11b657-640b-458c-9870-62fd7fdbe88a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 718.977041] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52794d6c-234a-5d24-062f-48fb666315ed, 'name': SearchDatastore_Task, 'duration_secs': 0.027425} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.977041] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.977616] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.977988] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.978281] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.978552] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.979185] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3af33e1d-def1-44cb-add5-55177c92193e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.995892] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.996103] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.996958] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d6414df-b7da-4e57-aafe-60af21f5883a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.002793] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 719.002793] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52741936-93d2-a4dd-ea79-20eb0009de80" [ 719.002793] env[65758]: _type = "Task" [ 719.002793] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.012688] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52741936-93d2-a4dd-ea79-20eb0009de80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.014667] env[65758]: DEBUG nova.compute.manager [req-717aa428-1ef0-4252-984b-7e94f7be59a3 req-f6c82caf-a832-4433-9385-c0f6fb8df257 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Received event network-vif-plugged-bc11b657-640b-458c-9870-62fd7fdbe88a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 719.014836] env[65758]: DEBUG oslo_concurrency.lockutils [req-717aa428-1ef0-4252-984b-7e94f7be59a3 req-f6c82caf-a832-4433-9385-c0f6fb8df257 service nova] Acquiring lock "974d06c1-2704-4a78-bbd7-f54335c4288e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.015056] env[65758]: DEBUG oslo_concurrency.lockutils [req-717aa428-1ef0-4252-984b-7e94f7be59a3 req-f6c82caf-a832-4433-9385-c0f6fb8df257 service nova] Lock "974d06c1-2704-4a78-bbd7-f54335c4288e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.015196] env[65758]: DEBUG oslo_concurrency.lockutils [req-717aa428-1ef0-4252-984b-7e94f7be59a3 req-f6c82caf-a832-4433-9385-c0f6fb8df257 service nova] Lock "974d06c1-2704-4a78-bbd7-f54335c4288e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.015350] env[65758]: DEBUG nova.compute.manager [req-717aa428-1ef0-4252-984b-7e94f7be59a3 req-f6c82caf-a832-4433-9385-c0f6fb8df257 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] No waiting events found dispatching network-vif-plugged-bc11b657-640b-458c-9870-62fd7fdbe88a {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 719.015509] env[65758]: WARNING nova.compute.manager [req-717aa428-1ef0-4252-984b-7e94f7be59a3 req-f6c82caf-a832-4433-9385-c0f6fb8df257 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Received unexpected event 
network-vif-plugged-bc11b657-640b-458c-9870-62fd7fdbe88a for instance with vm_state building and task_state spawning. [ 719.022480] env[65758]: DEBUG nova.network.neutron [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Updated VIF entry in instance network info cache for port 0cad03bd-bdfb-4780-a072-70a72be1d8b2. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 719.022842] env[65758]: DEBUG nova.network.neutron [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Updating instance_info_cache with network_info: [{"id": "0cad03bd-bdfb-4780-a072-70a72be1d8b2", "address": "fa:16:3e:6f:bb:07", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cad03bd-bd", "ovs_interfaceid": "0cad03bd-bdfb-4780-a072-70a72be1d8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 719.089908] env[65758]: DEBUG nova.scheduler.client.report [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 719.183016] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660183, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.200063} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.183534] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 719.187181] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a948a2-33ee-4acd-8c4b-81bad4461fc4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.216022] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 719.219179] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f83b742b-3fac-47f4-a18c-dad7687b231f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.238572] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 719.238572] env[65758]: value = "task-4660184" [ 719.238572] env[65758]: _type = "Task" [ 719.238572] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.251619] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660184, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.347538] env[65758]: DEBUG nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 719.378043] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 719.378043] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.378043] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 719.378043] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.378409] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 719.379022] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 719.379475] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 719.380174] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 719.380174] env[65758]: DEBUG nova.virt.hardware [None 
req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 719.380301] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 719.380428] env[65758]: DEBUG nova.virt.hardware [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 719.380943] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.381294] env[65758]: DEBUG nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Instance network_info: |[{"id": "bc11b657-640b-458c-9870-62fd7fdbe88a", "address": "fa:16:3e:dc:f8:a2", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc11b657-64", "ovs_interfaceid": "bc11b657-640b-458c-9870-62fd7fdbe88a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 719.382281] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004cf5b2-1862-4e11-89c1-a1efe6f2f984 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.385401] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:f8:a2', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'fd77ecbc-aaaf-45f4-ae8f-977d90e4052f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc11b657-640b-458c-9870-62fd7fdbe88a', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 719.396643] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 719.396643] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 719.396643] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff19e544-0da9-4c12-803e-356d22d4b70a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.416015] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cae008-9f34-47e1-b2f5-0afc1071a646 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.421535] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.421535] env[65758]: value = "task-4660185" [ 719.421535] env[65758]: _type = "Task" [ 719.421535] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.436025] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660185, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.516387] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52741936-93d2-a4dd-ea79-20eb0009de80, 'name': SearchDatastore_Task, 'duration_secs': 0.088667} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.517351] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9fbacd6-c27f-47bf-831b-ea77ad902d89 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.526042] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 719.526042] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bd3670-184a-ed97-307d-269b99e81137" [ 719.526042] env[65758]: _type = "Task" [ 719.526042] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.530058] env[65758]: DEBUG oslo_concurrency.lockutils [req-32d1286e-ed02-4209-9e70-4954b8f4276e req-dce42346-7a7b-4e2a-a354-f68f350db68a service nova] Releasing lock "refresh_cache-4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.536930] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bd3670-184a-ed97-307d-269b99e81137, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.596843] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.288s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.601231] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.418s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.603899] env[65758]: INFO nova.compute.claims [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.636580] env[65758]: INFO nova.scheduler.client.report [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Deleted allocations for instance 483765b5-c63c-4aac-9082-519bbc4e6eb5 [ 719.751211] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660184, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.823551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "de8f3600-b25f-4396-af37-ea703587979c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.823551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "de8f3600-b25f-4396-af37-ea703587979c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.823551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "de8f3600-b25f-4396-af37-ea703587979c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.823551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "de8f3600-b25f-4396-af37-ea703587979c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.823551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "de8f3600-b25f-4396-af37-ea703587979c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.826693] env[65758]: INFO nova.compute.manager [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Terminating instance [ 719.935543] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660185, 'name': CreateVM_Task, 'duration_secs': 0.433762} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.935543] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 719.936792] env[65758]: WARNING neutronclient.v2_0.client [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
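Task completion entries in this log carry their elapsed time inline (e.g. 'duration_secs': 0.433762 for task-4660185 above). A small, hypothetical helper for pulling those timings out of lines in this exact format, e.g. to compare CreateVM_Task and CopyVirtualDisk_Task latencies across the run:

import re

TASK_RE = re.compile(
    r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[0-9.]+)\} completed successfully")

def task_durations(lines):
    # Yields (task id, task name, duration in seconds) for each completion line.
    for line in lines:
        m = TASK_RE.search(line)
        if m:
            yield m.group('id'), m.group('name'), float(m.group('secs'))
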
[ 719.936792] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.936792] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.936940] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 719.937132] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed91b1d0-3549-4b73-a5cd-4287f0de01cf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.945142] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 719.945142] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522cb2f6-4242-e226-8d67-973c30f495bf" [ 719.945142] env[65758]: _type = "Task" [ 719.945142] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.957495] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522cb2f6-4242-e226-8d67-973c30f495bf, 'name': SearchDatastore_Task, 'duration_secs': 0.01095} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.958382] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.958382] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 719.958382] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.958382] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.958637] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 719.958811] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05fab6f9-e955-4682-85bd-75aaaf7ddc32 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.975022] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 719.975022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 719.975022] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f3b1b7f-9041-424f-95b5-5b0afe706a2e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.980945] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 719.980945] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae318a-1c9c-4931-d5b3-ef11e89ac989" [ 719.980945] env[65758]: _type = "Task" [ 719.980945] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.992135] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae318a-1c9c-4931-d5b3-ef11e89ac989, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.039802] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bd3670-184a-ed97-307d-269b99e81137, 'name': SearchDatastore_Task, 'duration_secs': 0.026022} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.041509] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.041796] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1/4fda2aa0-451c-4c0f-a03a-19ea8b083ba1.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 720.046027] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7847fb8-833e-4d49-9b6f-8ceae34f7077 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.055740] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 720.055740] env[65758]: value = "task-4660186" [ 720.055740] env[65758]: _type = "Task" [ 720.055740] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.069113] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.152749] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f4327b19-36d1-496e-8301-efeaebfa145c tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "483765b5-c63c-4aac-9082-519bbc4e6eb5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.939s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.171056] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "105c53ce-e657-4a29-bc7f-96b4f885707a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.171315] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.229211] env[65758]: DEBUG nova.network.neutron [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Successfully updated port: b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 720.255101] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660184, 'name': ReconfigVM_Task, 'duration_secs': 0.9945} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.256208] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9/8a7f1d79-97ac-4503-a4ed-c99e4f6718c9.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.256925] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9acf8aa8-84d2-422f-83f9-f9705bada73d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.266642] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 720.266642] env[65758]: value = "task-4660187" [ 720.266642] env[65758]: _type = "Task" [ 720.266642] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.283764] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660187, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.332154] env[65758]: DEBUG nova.compute.manager [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 720.332154] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.333239] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa757b5-e15d-453a-9c95-c9e3b08eeb32 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.346664] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 720.346988] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22a67ccc-0e7a-4d48-82a3-ba3007340e4a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.356123] env[65758]: DEBUG oslo_vmware.api [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 720.356123] env[65758]: value = "task-4660188" [ 720.356123] env[65758]: _type = "Task" [ 720.356123] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.367755] env[65758]: DEBUG oslo_vmware.api [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660188, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.496042] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae318a-1c9c-4931-d5b3-ef11e89ac989, 'name': SearchDatastore_Task, 'duration_secs': 0.011569} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.497028] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-789c9635-0c91-449a-80bf-eec95e89423d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.504398] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 720.504398] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5255fd62-d937-d1c3-9771-11c80457d1e6" [ 720.504398] env[65758]: _type = "Task" [ 720.504398] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.518692] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5255fd62-d937-d1c3-9771-11c80457d1e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.549429] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "b7692c74-c919-45b4-991b-c06a530ff9ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.550466] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7692c74-c919-45b4-991b-c06a530ff9ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.550466] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "b7692c74-c919-45b4-991b-c06a530ff9ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.550466] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7692c74-c919-45b4-991b-c06a530ff9ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.550466] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7692c74-c919-45b4-991b-c06a530ff9ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.554609] env[65758]: INFO nova.compute.manager [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Terminating instance [ 720.571485] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660186, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.606920] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquiring lock "56ff4122-a999-4caf-b805-0754a66d6bc7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.607316] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "56ff4122-a999-4caf-b805-0754a66d6bc7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.734464] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.735621] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.735621] env[65758]: DEBUG nova.network.neutron [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 720.784985] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660187, 'name': Rename_Task, 'duration_secs': 0.335181} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.787870] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 720.788400] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-877918cc-07ed-4945-ac28-fb175c1dba36 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.799188] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 720.799188] env[65758]: value = "task-4660189" [ 720.799188] env[65758]: _type = "Task" [ 720.799188] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.816883] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660189, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.874055] env[65758]: DEBUG oslo_vmware.api [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660188, 'name': PowerOffVM_Task, 'duration_secs': 0.367026} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.874416] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 720.874631] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 720.874981] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a60a7a2-6519-452c-8d5c-61210f04d7fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.958593] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 720.958593] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 720.958593] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleting the datastore file [datastore2] de8f3600-b25f-4396-af37-ea703587979c {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 720.958593] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4559619-5f9e-4da4-b11a-0edbd57a7a66 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.967355] env[65758]: DEBUG oslo_vmware.api [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 720.967355] env[65758]: value = "task-4660191" [ 720.967355] env[65758]: _type = "Task" [ 720.967355] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.985083] env[65758]: DEBUG oslo_vmware.api [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660191, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.022623] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5255fd62-d937-d1c3-9771-11c80457d1e6, 'name': SearchDatastore_Task, 'duration_secs': 0.032249} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.026580] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.027021] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e/974d06c1-2704-4a78-bbd7-f54335c4288e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 721.028257] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4bb20bad-4046-4395-ab4b-4bbf42ac250c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.037451] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 721.037451] env[65758]: value = "task-4660192" [ 721.037451] env[65758]: _type = "Task" [ 721.037451] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.051336] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.065407] env[65758]: DEBUG nova.compute.manager [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 721.065505] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 721.070564] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200f7429-e9dd-4e6a-b4d9-1ecdb824a377 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.073649] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660186, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616849} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.073989] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1/4fda2aa0-451c-4c0f-a03a-19ea8b083ba1.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.074257] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.074955] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09ee0223-8c79-42c1-94c1-449a4edcd0f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.082726] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 721.082726] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d0b23d1-c458-4aa5-b890-6ef0d250f740 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.087105] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 721.087105] env[65758]: value = "task-4660193" [ 721.087105] env[65758]: _type = "Task" [ 721.087105] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.095709] env[65758]: DEBUG oslo_vmware.api [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 721.095709] env[65758]: value = "task-4660194" [ 721.095709] env[65758]: _type = "Task" [ 721.095709] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.104542] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660193, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.111399] env[65758]: DEBUG oslo_vmware.api [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.242134] env[65758]: WARNING openstack [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 721.242821] env[65758]: WARNING openstack [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 721.295061] env[65758]: DEBUG nova.compute.manager [req-8a71ed34-9896-490f-a937-334a84c43218 req-8f2702c2-026a-4f2b-955c-2dd4198cbb0f service nova] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Received event network-vif-deleted-4d1d9ca2-dbbe-4bfa-aaeb-8f6a7d1509d7 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 721.318853] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660189, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.355810] env[65758]: DEBUG nova.network.neutron [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 721.482793] env[65758]: DEBUG oslo_vmware.api [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660191, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23918} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.483109] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.483301] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 721.483489] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.483680] env[65758]: INFO nova.compute.manager [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: de8f3600-b25f-4396-af37-ea703587979c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 721.484094] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 721.484355] env[65758]: DEBUG nova.compute.manager [-] [instance: de8f3600-b25f-4396-af37-ea703587979c] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 721.484473] env[65758]: DEBUG nova.network.neutron [-] [instance: de8f3600-b25f-4396-af37-ea703587979c] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 721.484777] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 721.485370] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 721.485984] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 721.494369] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d9cd19-d6ca-4a7b-af6c-586d234d688c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.503547] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7c6762-5ed4-4c2e-a39c-58df4ec97c5e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.544190] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575134c9-b26f-4871-a48e-e9cd83879a07 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.554028] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660192, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.558182] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a935099b-5f1e-48cb-b2b1-5c51c524b9a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.575764] env[65758]: DEBUG nova.compute.provider_tree [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.600199] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660193, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095278} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.603867] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 721.605509] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76ca13e-ca98-4153-8832-08c117a60a57 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.615434] env[65758]: DEBUG oslo_vmware.api [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660194, 'name': PowerOffVM_Task, 'duration_secs': 0.227838} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.629721] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 721.630266] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 721.645111] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1/4fda2aa0-451c-4c0f-a03a-19ea8b083ba1.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 721.645490] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f921e47a-3f59-4f57-929e-c9b1209772a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.647926] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-393fa727-14a6-4dd7-8590-7ca2f133e68c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.672795] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 721.672795] env[65758]: value = "task-4660196" [ 721.672795] env[65758]: _type = "Task" [ 721.672795] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.684698] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660196, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.742487] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 721.742634] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 721.742784] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleting the datastore file [datastore2] b7692c74-c919-45b4-991b-c06a530ff9ef {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 721.743077] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ce47e4e-ecdc-485f-a46e-47dce6b5d2ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.751601] env[65758]: DEBUG oslo_vmware.api [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 721.751601] env[65758]: value = "task-4660197" [ 721.751601] env[65758]: _type = "Task" [ 721.751601] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.767315] env[65758]: DEBUG oslo_vmware.api [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.815524] env[65758]: DEBUG oslo_vmware.api [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660189, 'name': PowerOnVM_Task, 'duration_secs': 0.595535} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.815932] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 721.816213] env[65758]: DEBUG nova.compute.manager [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 721.817155] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d32d69-0b0e-401b-a81b-a6464de6655c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.881494] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 721.895448] env[65758]: WARNING neutronclient.v2_0.client [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 721.896151] env[65758]: WARNING openstack [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 721.897410] env[65758]: WARNING openstack [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 722.051134] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660192, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.724626} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.051489] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e/974d06c1-2704-4a78-bbd7-f54335c4288e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 722.051724] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 722.051943] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65a9e572-24a7-448c-a056-a0b11c5d8e9e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.059704] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 722.059704] env[65758]: value = "task-4660198" [ 722.059704] env[65758]: _type = "Task" [ 722.059704] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.068982] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660198, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.079027] env[65758]: DEBUG nova.scheduler.client.report [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 722.185423] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660196, 'name': ReconfigVM_Task, 'duration_secs': 0.328557} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.187949] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1/4fda2aa0-451c-4c0f-a03a-19ea8b083ba1.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 722.189340] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d0b0f29-ea03-4e47-a160-e45f48492d58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.199414] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 722.199414] env[65758]: value = "task-4660199" [ 722.199414] env[65758]: _type = "Task" [ 722.199414] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.213546] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660199, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.264477] env[65758]: DEBUG oslo_vmware.api [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392436} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.264873] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 722.265076] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 722.265321] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 722.265773] env[65758]: INFO nova.compute.manager [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 722.266108] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 722.266360] env[65758]: DEBUG nova.compute.manager [-] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 722.266557] env[65758]: DEBUG nova.network.neutron [-] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 722.266766] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 722.267403] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 722.267666] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 722.346610] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.409654] env[65758]: DEBUG nova.network.neutron [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", "ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 722.469726] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 722.571817] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102225} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.572269] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.575630] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c1ab30-9ee5-4828-baa4-9ecceb9509f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.595971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.995s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.596518] env[65758]: DEBUG nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 722.611108] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e/974d06c1-2704-4a78-bbd7-f54335c4288e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.611108] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.227s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.612478] env[65758]: INFO nova.compute.claims [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.619875] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f254ac3e-788e-49db-ae17-ad849a20b392 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.646486] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 722.646486] env[65758]: value = "task-4660200" [ 722.646486] env[65758]: _type = "Task" [ 722.646486] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.656900] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660200, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.689955] env[65758]: DEBUG nova.network.neutron [-] [instance: de8f3600-b25f-4396-af37-ea703587979c] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 722.711574] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660199, 'name': Rename_Task, 'duration_secs': 0.168887} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.712535] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 722.712535] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07496954-19fe-43f4-a4d5-3ac45598c75b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.722375] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 722.722375] env[65758]: value = "task-4660201" [ 722.722375] env[65758]: _type = "Task" [ 722.722375] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.733024] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660201, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.913111] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.913444] env[65758]: DEBUG nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Instance network_info: |[{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", "ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 722.914263] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b 
tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:af:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5efce30e-48dd-493a-a354-f562a8adf7af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 722.925630] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Creating folder: Project (c4c2ab2b80c04c38bfb4c7cafac87fe6). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.927213] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3268e128-fd07-41ab-8d31-054af799a919 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.931232] env[65758]: DEBUG nova.compute.manager [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Received event network-changed-bc11b657-640b-458c-9870-62fd7fdbe88a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 722.931480] env[65758]: DEBUG nova.compute.manager [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Refreshing instance network info cache due to event network-changed-bc11b657-640b-458c-9870-62fd7fdbe88a. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 722.931749] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Acquiring lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.931923] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Acquired lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.932150] env[65758]: DEBUG nova.network.neutron [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Refreshing network info cache for port bc11b657-640b-458c-9870-62fd7fdbe88a {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 722.945490] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Created folder: Project (c4c2ab2b80c04c38bfb4c7cafac87fe6) in parent group-v909763. [ 722.946600] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Creating folder: Instances. Parent ref: group-v909862. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.946600] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aec0fb6e-fab8-488a-bd63-69e6824e3c4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.961140] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Created folder: Instances in parent group-v909862. [ 722.964657] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 722.964657] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6159a35-f073-4931-b0b0-832a88680356] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 722.964657] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-616c26d9-c8e2-49f8-99e1-52be18d204dd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.985794] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 722.985794] env[65758]: value = "task-4660204" [ 722.985794] env[65758]: _type = "Task" [ 722.985794] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.995549] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660204, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.112795] env[65758]: DEBUG nova.compute.utils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 723.114856] env[65758]: DEBUG nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 723.115179] env[65758]: DEBUG nova.network.neutron [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 723.115993] env[65758]: WARNING neutronclient.v2_0.client [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 723.115993] env[65758]: WARNING neutronclient.v2_0.client [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 723.116442] env[65758]: WARNING openstack [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 723.116795] env[65758]: WARNING openstack [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 723.160276] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660200, 'name': ReconfigVM_Task, 'duration_secs': 0.415275} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.160629] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e/974d06c1-2704-4a78-bbd7-f54335c4288e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.161349] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6d8edf6-12fe-479e-9c60-146105f31806 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.171283] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 723.171283] env[65758]: value = "task-4660205" [ 723.171283] env[65758]: _type = "Task" [ 723.171283] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.174515] env[65758]: DEBUG nova.policy [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3acf0a8cd564f81914c7f95a3c4dce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3216444936b0444184f3cbb1497fffc6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 723.187062] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660205, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.192772] env[65758]: INFO nova.compute.manager [-] [instance: de8f3600-b25f-4396-af37-ea703587979c] Took 1.71 seconds to deallocate network for instance. [ 723.219092] env[65758]: DEBUG nova.network.neutron [-] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 723.239375] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660201, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.436828] env[65758]: WARNING neutronclient.v2_0.client [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 723.437874] env[65758]: WARNING openstack [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 723.438522] env[65758]: WARNING openstack [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 723.503566] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660204, 'name': CreateVM_Task, 'duration_secs': 0.422522} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.503757] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6159a35-f073-4931-b0b0-832a88680356] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 723.504315] env[65758]: WARNING neutronclient.v2_0.client [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 723.504666] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.504807] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.505153] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 723.505428] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dcb0e01-aa8a-455e-aef8-98ffc4e45e48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.512147] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 723.512147] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e98f2b-1d4f-8bfd-46a5-8a0afbda10dd" [ 723.512147] env[65758]: _type = "Task" [ 723.512147] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.521709] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e98f2b-1d4f-8bfd-46a5-8a0afbda10dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.553939] env[65758]: DEBUG nova.network.neutron [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Successfully created port: 528ce775-8b65-438e-b3a5-647df86651f8 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 723.629015] env[65758]: DEBUG nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 723.697112] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660205, 'name': Rename_Task, 'duration_secs': 0.259811} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.697112] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.697112] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fee2505-071e-43f5-9862-d2a510376666 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.702880] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.705453] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 723.705453] env[65758]: value = "task-4660206" [ 723.705453] env[65758]: _type = "Task" [ 723.705453] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.718805] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660206, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.732494] env[65758]: INFO nova.compute.manager [-] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Took 1.47 seconds to deallocate network for instance. 
[ 723.738862] env[65758]: DEBUG oslo_vmware.api [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660201, 'name': PowerOnVM_Task, 'duration_secs': 0.725732} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.745264] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 723.745695] env[65758]: INFO nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Took 9.90 seconds to spawn the instance on the hypervisor. [ 723.745695] env[65758]: DEBUG nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 723.750238] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9c17fe-6e73-4e70-b0fc-fe612f8a540b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.023844] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e98f2b-1d4f-8bfd-46a5-8a0afbda10dd, 'name': SearchDatastore_Task, 'duration_secs': 0.032463} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.027062] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.027318] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 724.027634] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.027799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.028047] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 724.028596] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5efae5fc-5404-48f9-a317-e190f1273a9b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.041784] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 724.041784] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 724.044752] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5d37c4a-bd0c-401a-abea-591192aafde2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.052084] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 724.052084] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b2e2da-17b5-7770-641d-d864776f876c" [ 724.052084] env[65758]: _type = "Task" [ 724.052084] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.061817] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b2e2da-17b5-7770-641d-d864776f876c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.219817] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660206, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.248315] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.277621] env[65758]: INFO nova.compute.manager [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Took 33.06 seconds to build instance. 
[ 724.294060] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae25174-4088-4388-b02c-f53320018bbd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.305940] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247d84b5-b059-43ea-85ae-234c45650209 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.362994] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86631d1f-db25-4a7b-9242-455783c48c4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.375984] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35c4612-9417-4e4b-8423-3c74ad673485 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.401195] env[65758]: DEBUG nova.compute.provider_tree [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.563521] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b2e2da-17b5-7770-641d-d864776f876c, 'name': SearchDatastore_Task, 'duration_secs': 0.016231} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.566115] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a30081da-0be2-4c6e-8d2a-c6b021517a62 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.571218] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 724.571218] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521ac9b7-4d5d-f2cd-2322-1017d9ab5d53" [ 724.571218] env[65758]: _type = "Task" [ 724.571218] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.582973] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521ac9b7-4d5d-f2cd-2322-1017d9ab5d53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.642442] env[65758]: DEBUG nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 724.681726] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 724.682359] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 724.682751] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 724.683203] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 724.683563] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 724.683943] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 724.684424] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.684810] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 724.685227] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 724.685641] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 724.686111] env[65758]: DEBUG nova.virt.hardware [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 724.687621] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016bf6e2-a967-48e7-9c91-850938669e42 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.704900] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2199f53c-b557-47f8-8e7c-cd8e6a6fd19f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.734560] env[65758]: DEBUG oslo_vmware.api [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660206, 'name': PowerOnVM_Task, 'duration_secs': 0.674703} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.734855] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.735087] env[65758]: INFO nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Took 8.09 seconds to spawn the instance on the hypervisor. 
[ 724.735277] env[65758]: DEBUG nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 724.736141] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96792f5-36e8-4f94-8724-680c7977fc04 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.781479] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58a29c56-5661-42ef-84c2-e272bbe746cb tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.572s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.906165] env[65758]: DEBUG nova.scheduler.client.report [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 725.083413] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521ac9b7-4d5d-f2cd-2322-1017d9ab5d53, 'name': SearchDatastore_Task, 'duration_secs': 0.036941} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.083697] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.083943] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e6159a35-f073-4931-b0b0-832a88680356/e6159a35-f073-4931-b0b0-832a88680356.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.084253] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28ced364-e0f2-4c2b-ba18-a850184ea594 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.093226] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 725.093226] env[65758]: value = "task-4660207" [ 725.093226] env[65758]: _type = "Task" [ 725.093226] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.103017] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.257876] env[65758]: DEBUG nova.network.neutron [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Successfully updated port: 528ce775-8b65-438e-b3a5-647df86651f8 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 725.265533] env[65758]: INFO nova.compute.manager [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Took 33.20 seconds to build instance. [ 725.286242] env[65758]: DEBUG nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 725.364365] env[65758]: WARNING neutronclient.v2_0.client [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 725.365136] env[65758]: WARNING openstack [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 725.365605] env[65758]: WARNING openstack [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 725.413137] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.802s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.414240] env[65758]: DEBUG nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 725.422536] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.777s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.422536] env[65758]: DEBUG nova.objects.instance [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lazy-loading 'resources' on Instance uuid 8eb65797-072b-4a7e-853d-26c0adc51bb2 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 725.604688] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.614540] env[65758]: DEBUG nova.network.neutron [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Updated VIF entry in instance network info cache for port bc11b657-640b-458c-9870-62fd7fdbe88a. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 725.614958] env[65758]: DEBUG nova.network.neutron [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Updating instance_info_cache with network_info: [{"id": "bc11b657-640b-458c-9870-62fd7fdbe88a", "address": "fa:16:3e:dc:f8:a2", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc11b657-64", "ovs_interfaceid": "bc11b657-640b-458c-9870-62fd7fdbe88a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 725.734800] env[65758]: DEBUG nova.compute.manager [req-20c8c081-eed3-4d2c-b2f7-c526b28ce6ee req-1c0f3ff8-6e93-42c3-bd5e-02e84bacd5c9 service nova] [instance: de8f3600-b25f-4396-af37-ea703587979c] Received event network-vif-deleted-bff20363-4d58-4158-b43d-6fd204aae97d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 725.736245] env[65758]: DEBUG nova.compute.manager [req-20c8c081-eed3-4d2c-b2f7-c526b28ce6ee req-1c0f3ff8-6e93-42c3-bd5e-02e84bacd5c9 service nova] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Received event network-vif-deleted-47787883-24ce-41e2-9595-7d07b0d86022 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 725.767618] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "refresh_cache-9118ff13-e2cf-404c-ae4d-2b9dbc52738d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.767618] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "refresh_cache-9118ff13-e2cf-404c-ae4d-2b9dbc52738d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.767618] env[65758]: DEBUG nova.network.neutron [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 725.769095] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-a2f96675-c1d3-4ebc-b534-78794de0d361 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "974d06c1-2704-4a78-bbd7-f54335c4288e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.722s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.818534] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.925033] env[65758]: DEBUG nova.compute.utils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 725.925033] env[65758]: DEBUG nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 725.925033] env[65758]: DEBUG nova.network.neutron [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 725.925264] env[65758]: WARNING neutronclient.v2_0.client [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 725.925423] env[65758]: WARNING neutronclient.v2_0.client [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 725.926045] env[65758]: WARNING openstack [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 725.926392] env[65758]: WARNING openstack [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 726.009447] env[65758]: DEBUG nova.policy [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3a79fbdbdc4294a30f87eabe5719de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9aaf5b39abda42f28a847d5fe0d0ecec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 726.110832] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660207, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.119188] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Releasing lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.119407] env[65758]: DEBUG nova.compute.manager [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Received event network-vif-plugged-b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 726.119633] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Acquiring lock "e6159a35-f073-4931-b0b0-832a88680356-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.119836] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Lock "e6159a35-f073-4931-b0b0-832a88680356-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.120033] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Lock "e6159a35-f073-4931-b0b0-832a88680356-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.120167] env[65758]: DEBUG nova.compute.manager [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] No waiting events found dispatching network-vif-plugged-b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 726.120434] env[65758]: WARNING nova.compute.manager [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Received unexpected event network-vif-plugged-b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 for instance with vm_state building and task_state spawning. [ 726.120517] env[65758]: DEBUG nova.compute.manager [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Received event network-changed-b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 726.120634] env[65758]: DEBUG nova.compute.manager [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Refreshing instance network info cache due to event network-changed-b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 726.120785] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Acquiring lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.120906] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Acquired lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.121067] env[65758]: DEBUG nova.network.neutron [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Refreshing network info cache for port b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 726.274816] env[65758]: WARNING openstack [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 726.275923] env[65758]: WARNING openstack [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 726.289119] env[65758]: DEBUG nova.compute.manager [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 726.370201] env[65758]: DEBUG nova.network.neutron [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Successfully created port: 12074e98-5413-4e8e-bedf-73bb6ccc2248 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 726.380735] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "81f961c3-ec8f-4281-be18-5d605fa73ecc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.380982] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.384381] env[65758]: DEBUG nova.network.neutron [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 726.440694] env[65758]: DEBUG nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 726.551230] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1650f7e9-1c0f-429e-9cd4-b8263215abf4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.555101] env[65758]: WARNING neutronclient.v2_0.client [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 726.556639] env[65758]: WARNING openstack [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 726.556639] env[65758]: WARNING openstack [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 726.570476] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e715702-5176-4836-869a-4badacc2d7e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.607586] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8a55a8-25ee-486f-8f37-761672bb4aa8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.616599] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660207, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.620099] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d7cce3-397d-4528-a845-ae32e5f2b055 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.626442] env[65758]: WARNING neutronclient.v2_0.client [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 726.627545] env[65758]: WARNING openstack [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 726.627992] env[65758]: WARNING openstack [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 726.653031] env[65758]: DEBUG nova.compute.provider_tree [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.756156] env[65758]: DEBUG nova.network.neutron [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Updating instance_info_cache with network_info: [{"id": "528ce775-8b65-438e-b3a5-647df86651f8", "address": "fa:16:3e:1f:c4:42", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap528ce775-8b", "ovs_interfaceid": "528ce775-8b65-438e-b3a5-647df86651f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 726.820288] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.915855] env[65758]: WARNING neutronclient.v2_0.client [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 726.916512] env[65758]: WARNING openstack [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 726.916853] env[65758]: WARNING openstack [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 727.114825] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660207, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.968157} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.115152] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e6159a35-f073-4931-b0b0-832a88680356/e6159a35-f073-4931-b0b0-832a88680356.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.115391] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.115670] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17dfd0bd-1fcd-4149-ba7c-384b0c3ba539 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.121936] env[65758]: DEBUG nova.objects.instance [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lazy-loading 'flavor' on Instance uuid adc1b956-1b5a-4272-b0ff-95a565e9c45c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.126024] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 727.126024] env[65758]: value = "task-4660208" [ 727.126024] env[65758]: _type = "Task" [ 727.126024] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.134711] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660208, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.156525] env[65758]: DEBUG nova.scheduler.client.report [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 727.239135] env[65758]: DEBUG nova.network.neutron [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updated VIF entry in instance network info cache for port b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 727.239135] env[65758]: DEBUG nova.network.neutron [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", "ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 727.258946] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "refresh_cache-9118ff13-e2cf-404c-ae4d-2b9dbc52738d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.259345] env[65758]: DEBUG nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Instance network_info: |[{"id": "528ce775-8b65-438e-b3a5-647df86651f8", "address": "fa:16:3e:1f:c4:42", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", 
"bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap528ce775-8b", "ovs_interfaceid": "528ce775-8b65-438e-b3a5-647df86651f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 727.259893] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:c4:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a965790c-2d2f-4c2a-9ee7-745f4d53039b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '528ce775-8b65-438e-b3a5-647df86651f8', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.268656] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Creating folder: Project (3216444936b0444184f3cbb1497fffc6). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.268968] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48b01481-48c5-4085-bffd-9c82f228176f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.283353] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Created folder: Project (3216444936b0444184f3cbb1497fffc6) in parent group-v909763. [ 727.283353] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Creating folder: Instances. Parent ref: group-v909865. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.283353] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9dd4d236-a065-48ee-8ca6-2aba503f5ece {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.292463] env[65758]: INFO nova.compute.manager [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Rescuing [ 727.292862] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.293043] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.293207] env[65758]: DEBUG nova.network.neutron [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 727.300615] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Created folder: Instances in parent group-v909865. [ 727.302035] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 727.302035] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.302035] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d9d8f54-137e-4cad-bfa5-97ad1bae386c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.325291] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.325291] env[65758]: value = "task-4660211" [ 727.325291] env[65758]: _type = "Task" [ 727.325291] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.335172] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660211, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.450790] env[65758]: DEBUG nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 727.478960] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.479321] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.479602] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.479794] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.479953] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.482442] env[65758]: INFO nova.compute.manager [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Terminating instance [ 727.489140] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 727.489377] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 727.489457] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 727.489694] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 727.489838] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 727.489994] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 727.490340] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 727.490459] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 727.490748] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 727.490915] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 
tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 727.491178] env[65758]: DEBUG nova.virt.hardware [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 727.492463] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bfece0-e66d-4e83-8cb5-5a33e0b68342 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.506026] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-badc6797-cc1d-4416-81bd-0595f1ff6058 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.630623] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.630871] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquired lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.631215] env[65758]: WARNING neutronclient.v2_0.client [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 727.640410] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660208, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.303785} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.640695] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.641846] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf94f310-30ac-42ed-9d9f-43d943550f67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.672967] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] e6159a35-f073-4931-b0b0-832a88680356/e6159a35-f073-4931-b0b0-832a88680356.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.673853] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.256s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.676445] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0e852a7-6f03-48a0-b3bc-6b1230828063 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.691867] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.549s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.693672] env[65758]: INFO nova.compute.claims [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.705788] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 727.705788] env[65758]: value = "task-4660212" [ 727.705788] env[65758]: _type = "Task" [ 727.705788] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.718789] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660212, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.724084] env[65758]: INFO nova.scheduler.client.report [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Deleted allocations for instance 8eb65797-072b-4a7e-853d-26c0adc51bb2 [ 727.739922] env[65758]: DEBUG oslo_concurrency.lockutils [req-b7d14e81-653c-4234-8432-ddf1908735a6 req-33525295-9260-47ce-ac5d-5d956e99a315 service nova] Releasing lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.796095] env[65758]: WARNING neutronclient.v2_0.client [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 727.796829] env[65758]: WARNING openstack [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 727.797203] env[65758]: WARNING openstack [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 727.835265] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660211, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.002430] env[65758]: DEBUG nova.compute.manager [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 728.002430] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.002430] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e8d6e8-b34e-440c-930c-ea78aa338ab4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.013945] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.014242] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad246135-1306-40b9-8c1c-fedf9845adf4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.022927] env[65758]: DEBUG oslo_vmware.api [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 728.022927] env[65758]: value = "task-4660213" [ 728.022927] env[65758]: _type = "Task" [ 728.022927] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.032646] env[65758]: DEBUG oslo_vmware.api [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660213, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.036036] env[65758]: DEBUG nova.network.neutron [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Successfully updated port: 12074e98-5413-4e8e-bedf-73bb6ccc2248 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 728.221405] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660212, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.232842] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a8c630b6-0320-43fa-b995-4d6173e71696 tempest-ServerExternalEventsTest-600761519 tempest-ServerExternalEventsTest-600761519-project-member] Lock "8eb65797-072b-4a7e-853d-26c0adc51bb2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.130s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.258841] env[65758]: WARNING neutronclient.v2_0.client [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 728.259544] env[65758]: WARNING openstack [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 728.259928] env[65758]: WARNING openstack [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 728.339612] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660211, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.413602] env[65758]: DEBUG nova.network.neutron [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 728.449347] env[65758]: DEBUG nova.network.neutron [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Updating instance_info_cache with network_info: [{"id": "bc11b657-640b-458c-9870-62fd7fdbe88a", "address": "fa:16:3e:dc:f8:a2", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc11b657-64", "ovs_interfaceid": "bc11b657-640b-458c-9870-62fd7fdbe88a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 728.533436] env[65758]: DEBUG oslo_vmware.api [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660213, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.539272] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.539468] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.539729] env[65758]: DEBUG nova.network.neutron [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 728.720949] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660212, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.842528] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660211, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.916878] env[65758]: WARNING neutronclient.v2_0.client [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 728.917266] env[65758]: WARNING openstack [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 728.917526] env[65758]: WARNING openstack [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 728.952498] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "refresh_cache-974d06c1-2704-4a78-bbd7-f54335c4288e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.960715] env[65758]: DEBUG nova.compute.manager [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Received event network-vif-plugged-528ce775-8b65-438e-b3a5-647df86651f8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 728.960715] env[65758]: DEBUG oslo_concurrency.lockutils [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Acquiring lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.960715] env[65758]: DEBUG oslo_concurrency.lockutils [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.960715] env[65758]: DEBUG oslo_concurrency.lockutils [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.960715] env[65758]: DEBUG nova.compute.manager [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] No waiting events found dispatching network-vif-plugged-528ce775-8b65-438e-b3a5-647df86651f8 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 728.960715] env[65758]: WARNING nova.compute.manager [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Received unexpected event 
network-vif-plugged-528ce775-8b65-438e-b3a5-647df86651f8 for instance with vm_state building and task_state spawning. [ 728.960715] env[65758]: DEBUG nova.compute.manager [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Received event network-changed-528ce775-8b65-438e-b3a5-647df86651f8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 728.960715] env[65758]: DEBUG nova.compute.manager [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Refreshing instance network info cache due to event network-changed-528ce775-8b65-438e-b3a5-647df86651f8. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 728.961587] env[65758]: DEBUG oslo_concurrency.lockutils [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Acquiring lock "refresh_cache-9118ff13-e2cf-404c-ae4d-2b9dbc52738d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.961587] env[65758]: DEBUG oslo_concurrency.lockutils [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Acquired lock "refresh_cache-9118ff13-e2cf-404c-ae4d-2b9dbc52738d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.961587] env[65758]: DEBUG nova.network.neutron [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Refreshing network info cache for port 528ce775-8b65-438e-b3a5-647df86651f8 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 729.037148] env[65758]: DEBUG oslo_vmware.api [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660213, 'name': PowerOffVM_Task, 'duration_secs': 1.008274} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.040261] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.040434] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.040983] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e8814ae-1731-48ee-976c-d6dc2daf66b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.043427] env[65758]: WARNING openstack [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 729.043787] env[65758]: WARNING openstack [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 729.107975] env[65758]: DEBUG nova.network.neutron [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 729.112591] env[65758]: DEBUG nova.compute.manager [req-440d068a-8982-4baf-8202-b4a05a409fae req-bfe6afed-ba72-4f7e-93ae-0d5c7d2b98ec service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Received event network-vif-plugged-12074e98-5413-4e8e-bedf-73bb6ccc2248 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 729.112819] env[65758]: DEBUG oslo_concurrency.lockutils [req-440d068a-8982-4baf-8202-b4a05a409fae req-bfe6afed-ba72-4f7e-93ae-0d5c7d2b98ec service nova] Acquiring lock "54db018a-d54c-4fe5-9a6e-600e801e00b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.113038] env[65758]: DEBUG oslo_concurrency.lockutils [req-440d068a-8982-4baf-8202-b4a05a409fae req-bfe6afed-ba72-4f7e-93ae-0d5c7d2b98ec service nova] Lock "54db018a-d54c-4fe5-9a6e-600e801e00b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.113207] env[65758]: DEBUG oslo_concurrency.lockutils [req-440d068a-8982-4baf-8202-b4a05a409fae req-bfe6afed-ba72-4f7e-93ae-0d5c7d2b98ec service nova] Lock "54db018a-d54c-4fe5-9a6e-600e801e00b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.113367] env[65758]: DEBUG nova.compute.manager [req-440d068a-8982-4baf-8202-b4a05a409fae req-bfe6afed-ba72-4f7e-93ae-0d5c7d2b98ec service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] No waiting events found dispatching network-vif-plugged-12074e98-5413-4e8e-bedf-73bb6ccc2248 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 729.113638] env[65758]: WARNING nova.compute.manager [req-440d068a-8982-4baf-8202-b4a05a409fae req-bfe6afed-ba72-4f7e-93ae-0d5c7d2b98ec service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Received unexpected event network-vif-plugged-12074e98-5413-4e8e-bedf-73bb6ccc2248 for instance with vm_state building and task_state spawning. 
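The "Acquiring lock / acquired ... waited / released ... held" triplets around the refresh_cache and *-events locks come from oslo.concurrency. A minimal sketch of the same in-process locking pattern follows; the lock names are placeholders rather than the UUID-based names used above, and the DEBUG pairs only appear with debug logging enabled.

    from oslo_concurrency import lockutils

    # Decorator form: serializes all callers of this function on one named lock,
    # producing "acquired ... waited" / "released ... held" DEBUG pairs.
    @lockutils.synchronized('refresh_cache-example', external=False)
    def refresh_instance_cache():
        pass  # critical section

    # Context-manager form for ad-hoc critical sections such as the *-events lock.
    with lockutils.lock('example-instance-events'):
        pass
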
[ 729.140421] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 729.140657] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 729.140838] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleting the datastore file [datastore2] 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 729.141188] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8216bda3-8deb-4ca3-8dcf-5977f94d4ed6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.154349] env[65758]: DEBUG oslo_vmware.api [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 729.154349] env[65758]: value = "task-4660215" [ 729.154349] env[65758]: _type = "Task" [ 729.154349] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.166393] env[65758]: DEBUG oslo_vmware.api [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660215, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.203525] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.203991] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.228669] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660212, 'name': ReconfigVM_Task, 'duration_secs': 1.189863} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.228985] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Reconfigured VM instance instance-00000026 to attach disk [datastore2] e6159a35-f073-4931-b0b0-832a88680356/e6159a35-f073-4931-b0b0-832a88680356.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.229781] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-808bd16d-28d2-44c6-8627-80d3afa5eba2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.239013] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 729.239013] env[65758]: value = "task-4660216" [ 729.239013] env[65758]: _type = "Task" [ 729.239013] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.247141] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.247141] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.247141] env[65758]: DEBUG nova.compute.manager [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 729.248941] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97275469-34c8-4a10-aac5-bc82715258ff {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.255197] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660216, 'name': Rename_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.265803] env[65758]: DEBUG nova.compute.manager [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 729.265803] env[65758]: DEBUG nova.objects.instance [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'flavor' on Instance uuid 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 729.342782] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660211, 'name': CreateVM_Task, 'duration_secs': 1.552854} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.344252] env[65758]: WARNING neutronclient.v2_0.client [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 729.344983] env[65758]: WARNING openstack [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 729.345414] env[65758]: WARNING openstack [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 729.353845] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 729.357307] env[65758]: WARNING neutronclient.v2_0.client [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
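The recurring "Disabling service 'block-storage' / 'key-manager'" warnings are openstacksdk reporting an oslo_config.cfg.NoSuchOptError while processing Nova's configuration. The small self-contained illustration below shows how that error arises when an option is read from a group where it was never registered; only the group and option names are taken from the log, everything else is an assumption.

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))

    try:
        _ = conf.cinder.valid_interfaces  # never registered on this ConfigOpts
    except cfg.NoSuchOptError as exc:
        # Prints: no such option valid_interfaces in group [cinder]
        print(exc)
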
[ 729.357653] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.357811] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.358166] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 729.358642] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d2d8e61-fe7b-4b00-bcb7-26f935ee1791 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.364847] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 729.364847] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fa4fff-8d44-5bee-719f-a624b1565f77" [ 729.364847] env[65758]: _type = "Task" [ 729.364847] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.375108] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fa4fff-8d44-5bee-719f-a624b1565f77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.376551] env[65758]: WARNING neutronclient.v2_0.client [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 729.377667] env[65758]: WARNING openstack [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 729.377893] env[65758]: WARNING openstack [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 729.467388] env[65758]: WARNING neutronclient.v2_0.client [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 729.468284] env[65758]: WARNING openstack [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 729.469251] env[65758]: WARNING openstack [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 729.480846] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c985b04e-0ceb-470c-aaa1-162debd59082 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.490151] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4b96b7-242d-452b-ab16-1917ca501eda {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.525430] env[65758]: DEBUG nova.network.neutron [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updating instance_info_cache with network_info: [{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", 
"version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 729.529200] env[65758]: DEBUG nova.network.neutron [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Updating instance_info_cache with network_info: [{"id": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "address": "fa:16:3e:ab:ac:fb", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12074e98-54", "ovs_interfaceid": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 729.530887] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702b9f49-9b41-44d1-abf5-2d240ae8ced5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.549601] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2cbcbc-3ba1-4a11-8eea-98716afe5c9d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.570726] env[65758]: DEBUG nova.compute.provider_tree [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.667970] env[65758]: DEBUG oslo_vmware.api [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] 
Task: {'id': task-4660215, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146657} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.668241] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.668848] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.668848] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.668848] env[65758]: INFO nova.compute.manager [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Took 1.67 seconds to destroy the instance on the hypervisor. [ 729.668984] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 729.669176] env[65758]: DEBUG nova.compute.manager [-] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 729.669271] env[65758]: DEBUG nova.network.neutron [-] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 729.669563] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 729.670188] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 729.670449] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 729.701257] env[65758]: WARNING neutronclient.v2_0.client [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 729.702734] env[65758]: WARNING openstack [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 729.702734] env[65758]: WARNING openstack [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 729.750612] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660216, 'name': Rename_Task, 'duration_secs': 0.188124} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.750978] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.751152] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18f4e710-d4a6-4d4a-ae48-de0681799734 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.760284] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 729.760284] env[65758]: value = "task-4660217" [ 729.760284] env[65758]: _type = "Task" [ 729.760284] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.765300] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 729.772154] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660217, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.852871] env[65758]: DEBUG nova.network.neutron [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Updated VIF entry in instance network info cache for port 528ce775-8b65-438e-b3a5-647df86651f8. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 729.853381] env[65758]: DEBUG nova.network.neutron [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Updating instance_info_cache with network_info: [{"id": "528ce775-8b65-438e-b3a5-647df86651f8", "address": "fa:16:3e:1f:c4:42", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap528ce775-8b", "ovs_interfaceid": "528ce775-8b65-438e-b3a5-647df86651f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 729.879561] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fa4fff-8d44-5bee-719f-a624b1565f77, 'name': SearchDatastore_Task, 'duration_secs': 0.010761} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.880046] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.880434] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 729.880658] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.880906] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.881129] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 729.881831] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ed7ef0a-db14-4b55-b99f-b67a953a2dcf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.893593] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.893784] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 729.894588] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84c258d4-ffe3-4f38-9bbc-cdc4c6a25f58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.901399] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 729.901399] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5226df2b-c3ef-1b81-9556-f6e15bd5633d" [ 729.901399] env[65758]: _type = "Task" [ 729.901399] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.911486] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5226df2b-c3ef-1b81-9556-f6e15bd5633d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.040818] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.041418] env[65758]: DEBUG nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Instance network_info: |[{"id": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "address": "fa:16:3e:ab:ac:fb", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12074e98-54", "ovs_interfaceid": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 730.042256] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Releasing lock 
"refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.042592] env[65758]: DEBUG nova.compute.manager [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Inject network info {{(pid=65758) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7777}} [ 730.042951] env[65758]: DEBUG nova.compute.manager [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] network_info to inject: |[{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7778}} [ 730.051063] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Reconfiguring VM instance to set the machine id {{(pid=65758) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 730.052045] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:ac:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12074e98-5413-4e8e-bedf-73bb6ccc2248', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.064109] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 730.064877] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 730.065309] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00528e2e-89f3-441b-8905-b5396fd024d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.087384] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.087823] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f94e9500-8bad-4ab9-b931-7639ef885e08 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.090892] env[65758]: DEBUG nova.scheduler.client.report [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 730.096168] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e11bcc37-ea93-4d37-93f8-b7a7759e6881 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.126698] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.127571] env[65758]: DEBUG nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 730.131921] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.962s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.132366] env[65758]: DEBUG nova.objects.instance [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lazy-loading 'resources' on Instance uuid 24016efd-cdb3-4c1e-9c08-8643400e729e {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.137456] env[65758]: DEBUG oslo_vmware.api [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 730.137456] env[65758]: value = "task-4660218" [ 730.137456] env[65758]: _type = "Task" [ 730.137456] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.140166] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 730.140166] env[65758]: value = "task-4660219" [ 730.140166] env[65758]: _type = "Task" [ 730.140166] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.155135] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.155135] env[65758]: value = "task-4660220" [ 730.155135] env[65758]: _type = "Task" [ 730.155135] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.173039] env[65758]: DEBUG oslo_vmware.api [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4660218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.173517] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660219, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.183215] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660220, 'name': CreateVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.271635] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660217, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.274389] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 730.274742] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e661cfd-7a6b-4651-9c50-54633a09e066 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.285241] env[65758]: DEBUG oslo_vmware.api [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 730.285241] env[65758]: value = "task-4660221" [ 730.285241] env[65758]: _type = "Task" [ 730.285241] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.299515] env[65758]: DEBUG oslo_vmware.api [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660221, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.359673] env[65758]: DEBUG oslo_concurrency.lockutils [req-bab09c3b-acb5-449f-aaba-58e90b86e030 req-7689eb5b-699d-4b10-a127-c7337cbd44aa service nova] Releasing lock "refresh_cache-9118ff13-e2cf-404c-ae4d-2b9dbc52738d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.418168] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5226df2b-c3ef-1b81-9556-f6e15bd5633d, 'name': SearchDatastore_Task, 'duration_secs': 0.010007} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.419910] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1775618d-b599-4800-ae52-c994e29398a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.428489] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 730.428489] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f4852a-02c1-a5eb-a63a-7de56144ba74" [ 730.428489] env[65758]: _type = "Task" [ 730.428489] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.440896] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f4852a-02c1-a5eb-a63a-7de56144ba74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.462925] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquiring lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.462925] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.638151] env[65758]: DEBUG nova.network.neutron [-] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 730.642493] env[65758]: DEBUG nova.compute.utils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 730.644742] env[65758]: DEBUG nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 730.644973] env[65758]: DEBUG nova.network.neutron [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 730.645352] env[65758]: WARNING neutronclient.v2_0.client [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
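The repeated "Waiting for the task: (returnval){ ... } to complete" and "Task: {...} progress is N%" entries above are produced by oslo.vmware's task polling around vCenter tasks such as PowerOffVM_Task and CreateVM_Task. Below is a minimal sketch of that invoke/wait pattern, assuming an already-created oslo_vmware.api.VMwareAPISession (`session`) and a VirtualMachine managed-object reference (`vm_ref`); both names are hypothetical and not taken from this log.

    # Sketch only: mirrors the invoke/wait pattern visible in the log.
    # `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession
    # and `vm_ref` a VirtualMachine managed-object reference.
    def power_off_vm(session, vm_ref):
        # Start the vCenter task; this corresponds to the
        # "Invoking VirtualMachine.PowerOffVM_Task" service lines.
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        # Block until vCenter reports the task as finished. oslo.vmware polls
        # the task state on an interval (the periodic "progress is N%" lines)
        # and raises if the task ends in an error state.
        return session.wait_for_task(task)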
[ 730.645665] env[65758]: WARNING neutronclient.v2_0.client [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 730.646294] env[65758]: WARNING openstack [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 730.646681] env[65758]: WARNING openstack [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 730.678094] env[65758]: DEBUG oslo_vmware.api [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4660218, 'name': ReconfigVM_Task, 'duration_secs': 0.233069} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.685335] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fd119e6b-7605-4fc8-b476-2fe194b85ea1 tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Reconfigured VM instance to set the machine id {{(pid=65758) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 730.687895] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660220, 'name': CreateVM_Task, 'duration_secs': 0.403589} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.688693] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660219, 'name': PowerOffVM_Task, 'duration_secs': 0.253772} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.692645] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 730.692969] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 730.693832] env[65758]: WARNING neutronclient.v2_0.client [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 730.694362] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.694484] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.695027] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 730.696588] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f497f55-1063-4d38-9f63-1ec52ab4a4ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.701070] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f77f44c-4b34-41b8-a8d1-b65626b9641a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.708349] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 730.708349] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529c2145-f0f4-4fa1-c76d-8d41c31a28d4" [ 730.708349] env[65758]: _type = "Task" [ 730.708349] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.731864] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b298b5-7ef2-42e1-a7cd-17147cacb2f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.747327] env[65758]: DEBUG nova.policy [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffc98a9206034c3e9afb5a1685ff3688', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60dcbdfe17cb46fa8dfc1b7690f28b1f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 730.757828] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529c2145-f0f4-4fa1-c76d-8d41c31a28d4, 'name': SearchDatastore_Task, 'duration_secs': 0.011708} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.759855] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.759855] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 730.759855] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.779319] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660217, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.804118] env[65758]: DEBUG oslo_vmware.api [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660221, 'name': PowerOffVM_Task, 'duration_secs': 0.21521} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.806407] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 730.809623] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 730.809845] env[65758]: DEBUG nova.compute.manager [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 730.810171] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb7ce8e1-0296-43fe-9433-be2f5c7cf2a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.813164] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b284547-871c-4504-a07d-4f8ed5ead107 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.825444] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 730.825444] env[65758]: value = "task-4660222" [ 730.825444] env[65758]: _type = "Task" [ 730.825444] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.840309] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 730.840515] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 730.840785] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.919755] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquiring lock "0addcbb1-3561-4c93-b714-37e6b613b962" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.920043] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "0addcbb1-3561-4c93-b714-37e6b613b962" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.920279] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquiring lock "0addcbb1-3561-4c93-b714-37e6b613b962-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.920520] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "0addcbb1-3561-4c93-b714-37e6b613b962-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.920774] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "0addcbb1-3561-4c93-b714-37e6b613b962-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.927277] env[65758]: INFO nova.compute.manager [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Terminating instance [ 730.944823] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f4852a-02c1-a5eb-a63a-7de56144ba74, 'name': SearchDatastore_Task, 'duration_secs': 0.013273} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.947526] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.948093] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 9118ff13-e2cf-404c-ae4d-2b9dbc52738d/9118ff13-e2cf-404c-ae4d-2b9dbc52738d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 730.948717] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.948897] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 730.949145] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c042ef3a-01e7-43ee-b85c-c9edc59c8826 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.952957] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90aff53a-7b21-4ec4-ab50-60798336ceea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.962596] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 730.962596] 
env[65758]: value = "task-4660223" [ 730.962596] env[65758]: _type = "Task" [ 730.962596] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.963810] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 730.964024] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 730.967554] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afae1e84-5992-45fb-b880-da0fa104c016 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.974515] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 730.974515] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52271df8-662a-2e94-8dfd-4aaceb2b8c3e" [ 730.974515] env[65758]: _type = "Task" [ 730.974515] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.980622] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.990154] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52271df8-662a-2e94-8dfd-4aaceb2b8c3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.108665] env[65758]: DEBUG nova.network.neutron [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Successfully created port: 972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 731.146562] env[65758]: INFO nova.compute.manager [-] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Took 1.48 seconds to deallocate network for instance. [ 731.155530] env[65758]: DEBUG nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 731.273239] env[65758]: DEBUG oslo_vmware.api [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660217, 'name': PowerOnVM_Task, 'duration_secs': 1.057118} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.273568] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.273769] env[65758]: INFO nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Took 11.93 seconds to spawn the instance on the hypervisor. [ 731.273986] env[65758]: DEBUG nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 731.274919] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c1fa1f-710d-4a31-b9c6-7aeb5176df2e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.334449] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c2c9d4c1-0f84-46b2-8449-05575148eb56 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.086s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.391480] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc0bac9-5a43-4950-9a34-298056b7cc53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.398935] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfdb10c-1e4b-44d9-8f25-a4ff37e27f93 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.435046] env[65758]: DEBUG nova.compute.manager [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 731.435116] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.435933] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b76101d-dbe0-4727-a9e0-e15c1e17799f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.439879] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6d3015-d867-495d-b7cb-3f38fba15add {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.453843] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c563ad-0a04-47bb-b646-224caf370dac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.461589] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 731.461589] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc92b958-786f-4508-8b43-7e6f65c64a6b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.482282] env[65758]: DEBUG nova.compute.provider_tree [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.486559] env[65758]: DEBUG oslo_vmware.api [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 731.486559] env[65758]: value = "task-4660224" [ 731.486559] env[65758]: _type = "Task" [ 731.486559] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.500492] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660223, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.510913] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52271df8-662a-2e94-8dfd-4aaceb2b8c3e, 'name': SearchDatastore_Task, 'duration_secs': 0.035223} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.515268] env[65758]: DEBUG oslo_vmware.api [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.515562] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65299440-0a12-4f3f-a595-0654658e6075 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.522469] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 731.522469] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52299539-9ab0-bea7-522f-a1c2d525ba7f" [ 731.522469] env[65758]: _type = "Task" [ 731.522469] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.536639] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52299539-9ab0-bea7-522f-a1c2d525ba7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.654224] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.798580] env[65758]: INFO nova.compute.manager [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Took 39.38 seconds to build instance. [ 731.976303] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660223, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63587} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.977658] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 9118ff13-e2cf-404c-ae4d-2b9dbc52738d/9118ff13-e2cf-404c-ae4d-2b9dbc52738d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 731.977658] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 731.977658] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-222b573e-0040-4e1f-b5f3-981757a091b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.985960] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 731.985960] env[65758]: value = "task-4660225" [ 731.985960] env[65758]: _type = "Task" [ 731.985960] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.990821] env[65758]: DEBUG nova.scheduler.client.report [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 732.004744] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660225, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.013306] env[65758]: DEBUG oslo_vmware.api [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660224, 'name': PowerOffVM_Task, 'duration_secs': 0.345664} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.014677] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 732.014947] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 732.015340] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1c64128-3e6f-42c3-af24-47653474e8ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.035373] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52299539-9ab0-bea7-522f-a1c2d525ba7f, 'name': SearchDatastore_Task, 'duration_secs': 0.055723} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.036030] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.036309] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 54db018a-d54c-4fe5-9a6e-600e801e00b0/54db018a-d54c-4fe5-9a6e-600e801e00b0.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 732.036680] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.036779] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.037532] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a60bc4c0-21f6-4346-a37a-990d0d3b21b0 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.039245] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ed72d2d-2821-4e3c-a814-ba14c8d226b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.052743] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 732.052743] env[65758]: value = "task-4660227" [ 732.052743] env[65758]: _type = "Task" [ 732.052743] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.054348] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.054553] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 732.058797] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-001ddcec-ffc0-423d-840a-4f1acd144ba3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.066519] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 732.066519] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fa56dd-cc16-4121-ba4b-54524ce851cb" [ 732.066519] env[65758]: _type = "Task" [ 732.066519] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.075399] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660227, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.081805] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fa56dd-cc16-4121-ba4b-54524ce851cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.103998] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 732.104269] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 732.104440] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Deleting the datastore file [datastore2] 0addcbb1-3561-4c93-b714-37e6b613b962 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 732.104730] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e89b184-0c68-40e3-912d-d650d1b3759f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.113081] env[65758]: DEBUG oslo_vmware.api [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for the task: (returnval){ [ 732.113081] env[65758]: value = "task-4660228" [ 732.113081] env[65758]: _type = "Task" [ 732.113081] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.122756] env[65758]: DEBUG oslo_vmware.api [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.164891] env[65758]: DEBUG nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 732.194985] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 732.195331] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 732.195546] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 732.195793] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 732.196120] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 732.196302] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 732.196536] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 732.196708] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 732.196893] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 732.197085] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 732.197286] env[65758]: DEBUG nova.virt.hardware [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 732.198307] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef27dde0-3515-4fa7-8517-c608e8b75466 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.208454] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cd4b98-13b9-49e7-80f3-63acbc8356d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.301072] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8160eb93-55a4-48ad-bfe5-bca421acdf3b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.894s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.497360] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660225, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085991} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.497804] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 732.498582] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74aba73c-dac0-403b-9a95-17c10eb8f120 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.501886] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.370s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.504388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.895s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.505762] env[65758]: DEBUG nova.objects.instance [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lazy-loading 'resources' on Instance uuid 2bd02c6d-a139-4259-8b28-eed5efc5d094 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 732.529127] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 9118ff13-e2cf-404c-ae4d-2b9dbc52738d/9118ff13-e2cf-404c-ae4d-2b9dbc52738d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 732.530411] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6dc55fa-a0d9-4f4f-8d93-28c12d9d5b5a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.546190] env[65758]: INFO nova.scheduler.client.report [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Deleted allocations for instance 24016efd-cdb3-4c1e-9c08-8643400e729e [ 732.560617] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 732.560617] env[65758]: value = "task-4660229" [ 732.560617] env[65758]: _type = "Task" [ 732.560617] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.569600] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660227, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.576489] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660229, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.585112] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fa56dd-cc16-4121-ba4b-54524ce851cb, 'name': SearchDatastore_Task, 'duration_secs': 0.022152} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.587375] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd2d9f3c-f162-41a2-8c1f-924bfd5939f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.593692] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 732.593692] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52438a2b-4c97-780d-c63a-7b31ace3832f" [ 732.593692] env[65758]: _type = "Task" [ 732.593692] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.605106] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52438a2b-4c97-780d-c63a-7b31ace3832f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.625497] env[65758]: DEBUG oslo_vmware.api [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Task: {'id': task-4660228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.455498} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.625497] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.625640] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 732.625798] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.625982] env[65758]: INFO nova.compute.manager [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Took 1.19 seconds to destroy the instance on the hypervisor. [ 732.626245] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 732.626446] env[65758]: DEBUG nova.compute.manager [-] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 732.626553] env[65758]: DEBUG nova.network.neutron [-] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 732.626865] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 732.627462] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 732.627795] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 732.804051] env[65758]: DEBUG nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 732.836733] env[65758]: DEBUG nova.network.neutron [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Successfully updated port: 972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 732.932889] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 733.057740] env[65758]: DEBUG oslo_concurrency.lockutils [None req-944101f8-34dd-4d9e-9937-e8a8f8689e4b tempest-DeleteServersAdminTestJSON-2062199305 tempest-DeleteServersAdminTestJSON-2062199305-project-member] Lock "24016efd-cdb3-4c1e-9c08-8643400e729e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.502s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.079306] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582043} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.079306] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660229, 'name': ReconfigVM_Task, 'duration_secs': 0.312624} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.080337] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 54db018a-d54c-4fe5-9a6e-600e801e00b0/54db018a-d54c-4fe5-9a6e-600e801e00b0.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 733.080623] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 733.080958] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 9118ff13-e2cf-404c-ae4d-2b9dbc52738d/9118ff13-e2cf-404c-ae4d-2b9dbc52738d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.081652] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88ed7f23-e96f-4d87-ad9b-4e72861f7ab4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.083930] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4aa22509-f534-417f-97b3-add361c520d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.094931] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 733.094931] env[65758]: value = "task-4660230" [ 733.094931] env[65758]: _type = "Task" [ 733.094931] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.094931] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 733.094931] env[65758]: value = "task-4660231" [ 733.094931] env[65758]: _type = "Task" [ 733.094931] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.117382] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52438a2b-4c97-780d-c63a-7b31ace3832f, 'name': SearchDatastore_Task, 'duration_secs': 0.025039} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.123932] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.124370] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. {{(pid=65758) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 733.124740] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660230, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.125400] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660231, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.128246] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2d5a1bf-f5d0-448c-9135-761f2dc7ef30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.137551] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 733.137551] env[65758]: value = "task-4660232" [ 733.137551] env[65758]: _type = "Task" [ 733.137551] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.149117] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660232, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.341245] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.341894] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquired lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.341894] env[65758]: DEBUG nova.network.neutron [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 733.348484] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.619034] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660231, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072928} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.619034] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660230, 'name': Rename_Task, 'duration_secs': 0.236367} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.619272] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.619532] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 733.620217] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdb3af6-281e-49a5-9852-2cf55f2732cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.623280] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4bb0879-bf0e-46c3-bb78-3c6a690c088d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.651442] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 54db018a-d54c-4fe5-9a6e-600e801e00b0/54db018a-d54c-4fe5-9a6e-600e801e00b0.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.659541] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-003450c4-7ce9-444c-885c-ee2f88bf25f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.675480] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 733.675480] env[65758]: value = "task-4660233" [ 733.675480] env[65758]: _type = "Task" [ 733.675480] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.684622] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660232, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.686686] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 733.686686] env[65758]: value = "task-4660234" [ 733.686686] env[65758]: _type = "Task" [ 733.686686] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.695244] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660233, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.703433] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.726918] env[65758]: DEBUG nova.compute.manager [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Received event network-changed-fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 733.726918] env[65758]: DEBUG nova.compute.manager [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Refreshing instance network info cache due to event network-changed-fb1e683c-095a-4512-a0a0-ec651a275ab8. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 733.727264] env[65758]: DEBUG oslo_concurrency.lockutils [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] Acquiring lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.727500] env[65758]: DEBUG oslo_concurrency.lockutils [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] Acquired lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.727746] env[65758]: DEBUG nova.network.neutron [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Refreshing network info cache for port fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 733.766892] env[65758]: DEBUG nova.compute.manager [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Received event network-changed-12074e98-5413-4e8e-bedf-73bb6ccc2248 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 733.766892] env[65758]: DEBUG nova.compute.manager [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Refreshing instance network info cache due to event network-changed-12074e98-5413-4e8e-bedf-73bb6ccc2248. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 733.767207] env[65758]: DEBUG oslo_concurrency.lockutils [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] Acquiring lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.767304] env[65758]: DEBUG oslo_concurrency.lockutils [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] Acquired lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.767435] env[65758]: DEBUG nova.network.neutron [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Refreshing network info cache for port 12074e98-5413-4e8e-bedf-73bb6ccc2248 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 733.774472] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a889f6-25d7-410f-8546-5d31784ac250 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.785837] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f58803-f36c-44c1-9b8c-cdede69b6c84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.819388] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91264bf-b897-4508-895c-9710fa488240 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.829111] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00f2413-85e9-4731-a13f-caf47e58503e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.849089] env[65758]: DEBUG nova.compute.provider_tree [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.849923] env[65758]: WARNING openstack [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 733.850151] env[65758]: WARNING openstack [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 733.925092] 
env[65758]: DEBUG nova.network.neutron [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 734.003814] env[65758]: WARNING neutronclient.v2_0.client [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 734.004486] env[65758]: WARNING openstack [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 734.004828] env[65758]: WARNING openstack [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 734.013401] env[65758]: DEBUG nova.network.neutron [-] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 734.099570] env[65758]: DEBUG nova.network.neutron [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updating instance_info_cache with network_info: [{"id": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "address": "fa:16:3e:f8:f1:bd", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap972faaf3-0e", "ovs_interfaceid": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 734.154601] env[65758]: DEBUG 
oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660232, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654876} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.155056] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. [ 734.156173] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32f8827-9eff-4408-b64b-612ed1fcf4bd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.191604] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 734.193950] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da29f848-5e52-4ea7-a774-17d289df4b26 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.217270] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660233, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.222030] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660234, 'name': ReconfigVM_Task, 'duration_secs': 0.338771} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.222333] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 734.222333] env[65758]: value = "task-4660235" [ 734.222333] env[65758]: _type = "Task" [ 734.222333] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.222561] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 54db018a-d54c-4fe5-9a6e-600e801e00b0/54db018a-d54c-4fe5-9a6e-600e801e00b0.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.223356] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6141464e-3d1b-4f36-bb2b-02870345b833 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.231166] env[65758]: WARNING neutronclient.v2_0.client [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 734.231960] env[65758]: WARNING openstack [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 734.232362] env[65758]: WARNING openstack [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 734.246866] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 734.246866] env[65758]: value = "task-4660236" [ 734.246866] env[65758]: _type = "Task" [ 734.246866] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.247181] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660235, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.257076] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660236, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.278562] env[65758]: WARNING neutronclient.v2_0.client [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 734.279259] env[65758]: WARNING openstack [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 734.279626] env[65758]: WARNING openstack [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 734.359311] env[65758]: DEBUG nova.scheduler.client.report [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 734.518043] env[65758]: WARNING neutronclient.v2_0.client [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 734.518043] env[65758]: WARNING openstack [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 734.518043] env[65758]: WARNING openstack [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 734.525773] env[65758]: INFO nova.compute.manager [-] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Took 1.90 seconds to deallocate network for instance. [ 734.553165] env[65758]: WARNING neutronclient.v2_0.client [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 734.553165] env[65758]: WARNING openstack [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 734.553165] env[65758]: WARNING openstack [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 734.603442] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Releasing lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.604838] env[65758]: DEBUG nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Instance network_info: |[{"id": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "address": "fa:16:3e:f8:f1:bd", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap972faaf3-0e", "ovs_interfaceid": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 734.604838] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:f1:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9bc2632-36f9-4912-8782-8bbb789f909d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '972faaf3-0ee4-4d20-a393-b48d940dbae2', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.613784] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca 
tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Creating folder: Project (60dcbdfe17cb46fa8dfc1b7690f28b1f). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.614572] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-272414a1-fef5-421a-b63e-76ae8e9855a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.633523] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Created folder: Project (60dcbdfe17cb46fa8dfc1b7690f28b1f) in parent group-v909763. [ 734.633523] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Creating folder: Instances. Parent ref: group-v909869. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.633632] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1ed612a-ae16-4d1b-bc39-f2d907e78af2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.643297] env[65758]: DEBUG nova.network.neutron [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Updated VIF entry in instance network info cache for port 12074e98-5413-4e8e-bedf-73bb6ccc2248. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 734.643660] env[65758]: DEBUG nova.network.neutron [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Updating instance_info_cache with network_info: [{"id": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "address": "fa:16:3e:ab:ac:fb", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12074e98-54", "ovs_interfaceid": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 734.648443] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 
tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Created folder: Instances in parent group-v909869. [ 734.648687] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 734.648872] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cca3e019-8e82-4473-8609-291703762a6e] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 734.649115] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e890b3a4-ae2c-4593-a0b8-3f583edb2acf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.674015] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.674015] env[65758]: value = "task-4660239" [ 734.674015] env[65758]: _type = "Task" [ 734.674015] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.685402] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660239, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.697319] env[65758]: DEBUG oslo_vmware.api [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660233, 'name': PowerOnVM_Task, 'duration_secs': 0.621105} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.697319] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 734.697541] env[65758]: INFO nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Took 10.05 seconds to spawn the instance on the hypervisor. 
[ 734.697541] env[65758]: DEBUG nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 734.698436] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34433ae-ad6a-490d-b3e4-11cd0b7a72be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.705745] env[65758]: DEBUG nova.network.neutron [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updated VIF entry in instance network info cache for port fb1e683c-095a-4512-a0a0-ec651a275ab8. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 734.706273] env[65758]: DEBUG nova.network.neutron [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updating instance_info_cache with network_info: [{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 734.713118] env[65758]: DEBUG nova.objects.instance [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lazy-loading 'flavor' on Instance uuid adc1b956-1b5a-4272-b0ff-95a565e9c45c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 734.742327] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660235, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.761061] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660236, 'name': Rename_Task, 'duration_secs': 0.200171} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.761061] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 734.761228] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54b9dea9-cbbf-4ba6-a989-47e3b2088722 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.769276] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 734.769276] env[65758]: value = "task-4660240" [ 734.769276] env[65758]: _type = "Task" [ 734.769276] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.779335] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660240, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.870641] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.366s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.874892] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.978s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.877942] env[65758]: INFO nova.compute.claims [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.908237] env[65758]: INFO nova.scheduler.client.report [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Deleted allocations for instance 2bd02c6d-a139-4259-8b28-eed5efc5d094 [ 735.037928] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.147558] env[65758]: DEBUG oslo_concurrency.lockutils [req-4ec0788c-af7f-45bd-bdf9-3867b82ad90e req-5072de5c-d32c-46a3-87ca-731a79e50beb service nova] Releasing lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.185751] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660239, 'name': CreateVM_Task, 'duration_secs': 0.398752} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.186655] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cca3e019-8e82-4473-8609-291703762a6e] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 735.186753] env[65758]: WARNING neutronclient.v2_0.client [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 735.187174] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.187678] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.188419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 735.188885] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ca85c7f-fd0d-4005-b558-2ab95c1d6bea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.196663] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 735.196663] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52246fe6-5703-48fb-7711-d0c3f7b63f26" [ 735.196663] env[65758]: _type = "Task" [ 735.196663] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.208346] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52246fe6-5703-48fb-7711-d0c3f7b63f26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.210261] env[65758]: DEBUG oslo_concurrency.lockutils [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] Releasing lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.210321] env[65758]: DEBUG nova.compute.manager [req-f7a2e1af-747d-4ac5-b119-68280299d8a8 req-f8bf0d9b-2513-4a5e-924e-452272a56582 service nova] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Received event network-vif-deleted-b4a1d8f6-c296-4e9a-9582-489f0ebc77a6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 735.232434] env[65758]: DEBUG oslo_concurrency.lockutils [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.233104] env[65758]: DEBUG oslo_concurrency.lockutils [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquired lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.233405] env[65758]: WARNING neutronclient.v2_0.client [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 735.245991] env[65758]: INFO nova.compute.manager [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Took 40.09 seconds to build instance. [ 735.254133] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660235, 'name': ReconfigVM_Task, 'duration_secs': 0.897023} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.254459] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 735.255914] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b184b474-69e7-4494-8cd3-376625e98ba4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.291668] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88e35977-be20-488b-979e-f0d40d3c55cd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.312470] env[65758]: DEBUG oslo_vmware.api [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660240, 'name': PowerOnVM_Task, 'duration_secs': 0.531693} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.314949] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 735.314949] env[65758]: INFO nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Took 7.86 seconds to spawn the instance on the hypervisor. [ 735.314949] env[65758]: DEBUG nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 735.315158] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 735.315158] env[65758]: value = "task-4660241" [ 735.315158] env[65758]: _type = "Task" [ 735.315158] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.316038] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ee1926-d1e3-4aba-a4b1-5dfadbddbfa0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.338338] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660241, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.416882] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2ea31cf4-ec41-46c8-b597-792fa5245996 tempest-ImagesNegativeTestJSON-1970577624 tempest-ImagesNegativeTestJSON-1970577624-project-member] Lock "2bd02c6d-a139-4259-8b28-eed5efc5d094" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.361s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.603343] env[65758]: INFO nova.compute.manager [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Rebuilding instance [ 735.676663] env[65758]: DEBUG nova.compute.manager [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 735.677466] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9440b8f6-5d42-4dae-958f-614c0b9a45d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.710223] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52246fe6-5703-48fb-7711-d0c3f7b63f26, 'name': SearchDatastore_Task, 'duration_secs': 0.016793} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.710223] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.710434] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.710660] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.710820] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.710958] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.711280] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0628f85-eafd-4fef-b1c7-49022eff063d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.723213] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.723559] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 735.724439] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34837e1b-b890-4282-a336-72f43c5620b3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.733949] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 735.733949] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52cbabf8-5c37-6686-a7fd-41280914d9fc" [ 735.733949] env[65758]: _type = "Task" [ 735.733949] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.746697] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52cbabf8-5c37-6686-a7fd-41280914d9fc, 'name': SearchDatastore_Task, 'duration_secs': 0.011459} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.748029] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c79a5945-1330-4e90-aa5e-cc4707110eb8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.750550] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0ddda7fc-c11f-4db3-9f17-8c180472ef49 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.603s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.757027] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 735.757027] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e3f4a2-1269-4a6e-ea92-0250ef19838b" [ 735.757027] env[65758]: _type = "Task" [ 735.757027] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.767814] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e3f4a2-1269-4a6e-ea92-0250ef19838b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.832042] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660241, 'name': ReconfigVM_Task, 'duration_secs': 0.415576} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.832042] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 735.832042] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebe88c8a-a4f1-4844-958f-360e38a2f6c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.839120] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 735.839120] env[65758]: value = "task-4660242" [ 735.839120] env[65758]: _type = "Task" [ 735.839120] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.856593] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660242, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.856593] env[65758]: INFO nova.compute.manager [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Took 38.50 seconds to build instance. [ 735.895661] env[65758]: DEBUG nova.network.neutron [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 736.255355] env[65758]: DEBUG nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 736.274203] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e3f4a2-1269-4a6e-ea92-0250ef19838b, 'name': SearchDatastore_Task, 'duration_secs': 0.014767} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.274534] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.275785] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] cca3e019-8e82-4473-8609-291703762a6e/cca3e019-8e82-4473-8609-291703762a6e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 736.276356] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d8fa730-4520-4bc1-884c-479b0ab73ebc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.288130] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 736.288130] env[65758]: value = "task-4660243" [ 736.288130] env[65758]: _type = "Task" [ 736.288130] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.302721] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.354140] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660242, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.358538] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4597ec15-e1bc-4415-9ee7-9653e55abbf0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "54db018a-d54c-4fe5-9a6e-600e801e00b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.014s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.399060] env[65758]: WARNING neutronclient.v2_0.client [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
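Editor's note: the recurring 'Waiting for the task: (returnval){...} to complete' / 'progress is N%' / 'completed successfully' triplets in this stretch come from oslo.vmware's task-polling helper (wait_for_task in oslo_vmware/api.py). The sketch below shows how a caller typically drives an asynchronous vCenter task such as CopyVirtualDisk_Task through that helper; the connection parameters, datacenter handle, and disk paths are placeholders, and the constructor argument order is an assumption based on oslo.vmware's documented API rather than anything taken from this log.

# Illustrative sketch (assumption-labelled), Python:
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc1.example.test',                 # vCenter host (placeholder)
    'administrator@vsphere.local',      # username (placeholder)
    'secret',                           # password (placeholder)
    10,                                 # api_retry_count
    0.5)                                # task_poll_interval in seconds

def copy_virtual_disk(src_path, dst_path, datacenter):
    """Start CopyVirtualDisk_Task and block until vCenter reports completion."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src_path, sourceDatacenter=datacenter,
                              destName=dst_path, destDatacenter=datacenter)
    # wait_for_task polls the task object and logs the 'progress is N%' /
    # 'completed successfully' lines seen throughout this log.
    return session.wait_for_task(task)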
[ 736.399865] env[65758]: WARNING openstack [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 736.400244] env[65758]: WARNING openstack [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 736.505886] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0e2687-f60c-436d-846c-f6d1b03386ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.514891] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9f6768-546d-49c2-86bb-1164823d24f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.521952] env[65758]: DEBUG nova.compute.manager [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Received event network-changed-b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 736.522265] env[65758]: DEBUG nova.compute.manager [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Refreshing instance network info cache due to event network-changed-b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 736.522511] env[65758]: DEBUG oslo_concurrency.lockutils [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] Acquiring lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.522727] env[65758]: DEBUG oslo_concurrency.lockutils [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] Acquired lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.524220] env[65758]: DEBUG nova.network.neutron [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Refreshing network info cache for port b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 736.565097] env[65758]: DEBUG nova.compute.manager [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Received event network-vif-plugged-972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 736.565336] env[65758]: DEBUG oslo_concurrency.lockutils [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Acquiring lock "cca3e019-8e82-4473-8609-291703762a6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.565550] env[65758]: DEBUG oslo_concurrency.lockutils [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Lock "cca3e019-8e82-4473-8609-291703762a6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.565714] env[65758]: DEBUG oslo_concurrency.lockutils [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Lock "cca3e019-8e82-4473-8609-291703762a6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.565883] env[65758]: DEBUG nova.compute.manager [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] No waiting events found dispatching network-vif-plugged-972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 736.566098] env[65758]: WARNING nova.compute.manager [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Received unexpected event network-vif-plugged-972faaf3-0ee4-4d20-a393-b48d940dbae2 for instance with vm_state building and task_state spawning. 
[ 736.569803] env[65758]: DEBUG nova.compute.manager [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Received event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 736.569803] env[65758]: DEBUG nova.compute.manager [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing instance network info cache due to event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 736.569803] env[65758]: DEBUG oslo_concurrency.lockutils [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Acquiring lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.569803] env[65758]: DEBUG oslo_concurrency.lockutils [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Acquired lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.569803] env[65758]: DEBUG nova.network.neutron [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 736.569803] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1161cf67-5552-4489-851b-3f0faf06a9fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.582279] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03f3ba5-07c0-4a19-9366-4290a684ea52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.599480] env[65758]: DEBUG nova.compute.provider_tree [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.607852] env[65758]: WARNING neutronclient.v2_0.client [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 736.608529] env[65758]: WARNING openstack [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 736.608881] env[65758]: WARNING openstack [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 736.698311] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 736.698647] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09960711-b0e7-43e6-b941-62e364995b80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.710027] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 736.710027] env[65758]: value = "task-4660244" [ 736.710027] env[65758]: _type = "Task" [ 736.710027] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.719233] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660244, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.729306] env[65758]: DEBUG nova.network.neutron [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updating instance_info_cache with network_info: [{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 736.793912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.802159] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660243, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.858929] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660242, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.862293] env[65758]: DEBUG nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 736.991182] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Acquiring lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.991182] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Acquired lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.991997] env[65758]: DEBUG nova.network.neutron [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 737.030701] env[65758]: WARNING neutronclient.v2_0.client [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 737.031260] env[65758]: WARNING openstack [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 737.031524] env[65758]: WARNING openstack [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 737.075648] env[65758]: WARNING neutronclient.v2_0.client [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
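Editor's note: the warning just above ("The python binding code in neutronclient is deprecated in favor of OpenstackSDK") recurs every time a network-info cache refresh goes through python-neutronclient. For reference, the sketch below shows the openstacksdk-style equivalent of the port lookups those refreshes perform; the cloud name and instance UUID are placeholders, and this is not Nova's actual refresh code.

# Illustrative sketch (assumption-labelled), Python:
import openstack

# openstack.connect() reads credentials from clouds.yaml; 'devstack' is a placeholder.
conn = openstack.connect(cloud='devstack')

def ports_for_instance(instance_uuid):
    # SDK counterpart of the queries behind 'Refreshing network info cache
    # for port ...' when python-neutronclient is replaced by openstacksdk.
    return list(conn.network.ports(device_id=instance_uuid))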
[ 737.076417] env[65758]: WARNING openstack [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 737.076768] env[65758]: WARNING openstack [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 737.087718] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.087718] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.103230] env[65758]: DEBUG nova.scheduler.client.report [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 737.223683] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 737.223816] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 737.224634] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de11417-6b5c-4605-b645-e34746a9562d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.234833] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Releasing lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.234941] env[65758]: DEBUG nova.compute.manager [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Inject network info {{(pid=65758) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7777}} [ 737.235232] env[65758]: DEBUG nova.compute.manager [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] network_info to inject: |[{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7778}} [ 737.240099] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Reconfiguring VM instance to set the machine id {{(pid=65758) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 737.240546] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 737.243957] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f52417d1-594a-4ab1-876f-9b6d20747ef4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.254351] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50e85813-622f-4b57-b3a7-c8b7d68e578e {{(pid=65758) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.267275] env[65758]: DEBUG oslo_vmware.api [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 737.267275] env[65758]: value = "task-4660245" [ 737.267275] env[65758]: _type = "Task" [ 737.267275] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.278877] env[65758]: DEBUG oslo_vmware.api [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4660245, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.303892] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.836493} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.304964] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] cca3e019-8e82-4473-8609-291703762a6e/cca3e019-8e82-4473-8609-291703762a6e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 737.306333] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.306509] env[65758]: WARNING neutronclient.v2_0.client [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 737.307183] env[65758]: WARNING openstack [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 737.307528] env[65758]: WARNING openstack [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 737.315641] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b40197eb-42bb-49bd-ab71-f60406a4312e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.325691] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 737.325691] env[65758]: value = "task-4660247" [ 737.325691] env[65758]: _type = "Task" [ 737.325691] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.340631] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660247, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.342099] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 737.342478] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 737.342551] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleting the datastore file [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 737.342896] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-327f0e26-7274-47d4-a673-3cc3ab72ef23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.350491] env[65758]: WARNING neutronclient.v2_0.client [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 737.351393] env[65758]: WARNING openstack [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 737.351915] env[65758]: WARNING openstack [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 737.363701] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 737.363701] env[65758]: value = "task-4660248" [ 737.363701] env[65758]: _type = "Task" [ 737.363701] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.373532] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660242, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.384114] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660248, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.392988] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.495197] env[65758]: WARNING neutronclient.v2_0.client [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 737.496093] env[65758]: WARNING openstack [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 737.496462] env[65758]: WARNING openstack [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 737.520975] env[65758]: DEBUG nova.network.neutron [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updated VIF entry in instance network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 737.521439] env[65758]: DEBUG nova.network.neutron [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updating instance_info_cache with network_info: [{"id": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "address": "fa:16:3e:f8:f1:bd", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap972faaf3-0e", "ovs_interfaceid": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 737.601471] env[65758]: DEBUG nova.network.neutron [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updated VIF entry in instance network info cache for port b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 737.601992] env[65758]: DEBUG nova.network.neutron [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", "ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 737.617084] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.617084] env[65758]: DEBUG nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 737.621025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.614s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.621415] env[65758]: DEBUG nova.objects.instance [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lazy-loading 'resources' on Instance uuid 492d1063-8eaf-4207-8d65-341fbc0b6c39 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 737.709961] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.710566] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.710794] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.710982] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.711173] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.713700] env[65758]: INFO nova.compute.manager [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Terminating instance [ 737.779271] 
env[65758]: DEBUG oslo_vmware.api [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4660245, 'name': ReconfigVM_Task, 'duration_secs': 0.241667} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.779463] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-518e2d42-a442-477c-a321-ea61e355213d tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Reconfigured VM instance to set the machine id {{(pid=65758) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 737.837009] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660247, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088227} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.837689] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.839810] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e743a7f-a525-4231-8bea-43e25606d448 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.870449] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] cca3e019-8e82-4473-8609-291703762a6e/cca3e019-8e82-4473-8609-291703762a6e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.872034] env[65758]: WARNING neutronclient.v2_0.client [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 737.872711] env[65758]: WARNING openstack [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 737.873140] env[65758]: WARNING openstack [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 737.884241] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0987234a-bdca-442c-8294-fa5979dbe769 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.909938] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318934} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.914445] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 737.914659] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 737.914900] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 737.917491] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660242, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.917997] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 737.917997] env[65758]: value = "task-4660249" [ 737.917997] env[65758]: _type = "Task" [ 737.917997] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.928578] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660249, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.024862] env[65758]: DEBUG oslo_concurrency.lockutils [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] Releasing lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.025151] env[65758]: DEBUG nova.compute.manager [req-bbe7c9c1-80c9-4121-ab0d-b72ec8aaea2a req-c34e6259-db93-423d-b006-3bb8f4d046aa service nova] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Received event network-vif-deleted-872949b5-9bac-4f83-acec-93e23be464c5 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 738.041743] env[65758]: DEBUG nova.network.neutron [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Updating instance_info_cache with network_info: [{"id": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "address": "fa:16:3e:ab:ac:fb", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12074e98-54", "ovs_interfaceid": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 738.106353] env[65758]: DEBUG oslo_concurrency.lockutils [req-eebaa8b7-6ac3-4a2e-b8d6-66d555b3fda7 req-9302af9d-f332-483e-a626-d2678c175cd5 service nova] Releasing lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.124641] env[65758]: DEBUG nova.compute.utils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 738.128573] env[65758]: DEBUG nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 
a0a9d947-f2ad-4a35-b336-1486c9a76b06] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 738.128794] env[65758]: DEBUG nova.network.neutron [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 738.129191] env[65758]: WARNING neutronclient.v2_0.client [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 738.129526] env[65758]: WARNING neutronclient.v2_0.client [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 738.130145] env[65758]: WARNING openstack [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 738.131327] env[65758]: WARNING openstack [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 738.181960] env[65758]: DEBUG nova.policy [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '742a9f6633b54c6f8cd432ac94b59e25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e3a324879d646699f950687546ea861', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 738.219530] env[65758]: DEBUG nova.compute.manager [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 738.219726] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.220760] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c12151-a31a-46d1-9ce8-f1673ad27e14 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.235218] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.235533] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-587718eb-fb22-49d7-a21b-6110366b6d82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.242916] env[65758]: DEBUG oslo_vmware.api [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 738.242916] env[65758]: value = "task-4660250" [ 738.242916] env[65758]: _type = "Task" [ 738.242916] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.254633] env[65758]: DEBUG oslo_vmware.api [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4660250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.374303] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660242, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.437233] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660249, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.544908] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Releasing lock "refresh_cache-54db018a-d54c-4fe5-9a6e-600e801e00b0" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.545612] env[65758]: DEBUG nova.compute.manager [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Inject network info {{(pid=65758) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7777}} [ 738.546151] env[65758]: DEBUG nova.compute.manager [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] network_info to inject: |[{"id": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "address": "fa:16:3e:ab:ac:fb", "network": {"id": "2204c81c-c112-4625-93b2-3f54e100d51e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-105392857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9aaf5b39abda42f28a847d5fe0d0ecec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12074e98-54", "ovs_interfaceid": "12074e98-5413-4e8e-bedf-73bb6ccc2248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7778}} [ 738.556867] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Reconfiguring VM instance to set the machine id {{(pid=65758) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 738.557949] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f017ac7-1bb0-4ad8-94c7-3fe65bc6e778 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.582722] env[65758]: DEBUG oslo_vmware.api [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Waiting for the task: (returnval){ [ 738.582722] env[65758]: value = "task-4660251" [ 738.582722] env[65758]: _type = "Task" [ 738.582722] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.594886] env[65758]: DEBUG oslo_vmware.api [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Task: {'id': task-4660251, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.600284] env[65758]: DEBUG nova.network.neutron [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Successfully created port: 12b480c3-4c9e-4da0-9f51-8b29cd9f54ce {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 738.641387] env[65758]: DEBUG nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 738.758335] env[65758]: DEBUG oslo_vmware.api [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4660250, 'name': PowerOffVM_Task, 'duration_secs': 0.282306} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.758974] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.759189] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.759470] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bf3635d-acc0-4e03-b1aa-fca810eba8a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.849294] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.849294] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.849294] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 
tempest-AttachInterfacesUnderV243Test-504714783-project-member] Deleting the datastore file [datastore1] adc1b956-1b5a-4272-b0ff-95a565e9c45c {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.849294] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7feec705-9263-47df-8d55-5ecb788f5271 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.855305] env[65758]: DEBUG oslo_vmware.api [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for the task: (returnval){ [ 738.855305] env[65758]: value = "task-4660253" [ 738.855305] env[65758]: _type = "Task" [ 738.855305] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.866568] env[65758]: DEBUG nova.compute.manager [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Received event network-changed-fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 738.866568] env[65758]: DEBUG nova.compute.manager [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Refreshing instance network info cache due to event network-changed-fb1e683c-095a-4512-a0a0-ec651a275ab8. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 738.866568] env[65758]: DEBUG oslo_concurrency.lockutils [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] Acquiring lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.866568] env[65758]: DEBUG oslo_concurrency.lockutils [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] Acquired lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.866796] env[65758]: DEBUG nova.network.neutron [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Refreshing network info cache for port fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 738.877891] env[65758]: DEBUG oslo_vmware.api [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4660253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.883392] env[65758]: DEBUG oslo_vmware.api [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660242, 'name': PowerOnVM_Task, 'duration_secs': 2.789893} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.883533] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 738.887026] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3b5974-6e0e-47a8-a101-e577256d14ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.891896] env[65758]: DEBUG nova.compute.manager [None req-3b0be415-ccdc-454d-8d6e-a4f5f08e8320 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 738.892041] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9703f067-6959-4339-8e5c-2059941c1e9a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.908061] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e570ddd8-5d0e-458f-b6aa-0bff2b131c0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.953705] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9c972f-2932-4bb5-8510-8e6b5ea42091 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.963835] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660249, 'name': ReconfigVM_Task, 'duration_secs': 0.591179} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.966378] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Reconfigured VM instance instance-00000029 to attach disk [datastore2] cca3e019-8e82-4473-8609-291703762a6e/cca3e019-8e82-4473-8609-291703762a6e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 738.969112] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ed602a1-33d0-4b62-9fa5-029b0dfd3f8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.972084] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885b5943-27e3-4490-968e-d2ba8599ae7f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.989174] env[65758]: DEBUG nova.compute.provider_tree [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.992781] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 738.993021] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 738.993191] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 738.993371] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 738.993512] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 738.993653] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 738.993855] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 738.994010] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 738.994189] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 738.994344] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 738.994509] env[65758]: DEBUG nova.virt.hardware [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 738.997344] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006a62e0-2333-4cab-8b0e-dbf1d705b819 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.003422] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 739.003422] env[65758]: value = "task-4660254" [ 739.003422] env[65758]: _type = "Task" [ 739.003422] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.003422] env[65758]: DEBUG nova.scheduler.client.report [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 739.018735] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc5fdb5-4636-4dcd-89eb-7dea89791128 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.028188] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660254, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.041539] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:bb:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0cad03bd-bdfb-4780-a072-70a72be1d8b2', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.049546] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 739.050311] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.050626] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1113dc67-19a1-4c73-b1a4-5a3b48c8f34f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.074945] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.074945] env[65758]: value = "task-4660255" [ 739.074945] env[65758]: _type = "Task" [ 739.074945] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.084687] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660255, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.093135] env[65758]: DEBUG oslo_vmware.api [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] Task: {'id': task-4660251, 'name': ReconfigVM_Task, 'duration_secs': 0.202705} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.093135] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0878fb86-086b-4c75-8069-af449c13423d tempest-ServersAdminTestJSON-1409953608 tempest-ServersAdminTestJSON-1409953608-project-admin] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Reconfigured VM instance to set the machine id {{(pid=65758) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 739.368476] env[65758]: DEBUG oslo_vmware.api [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Task: {'id': task-4660253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303493} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.368816] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.368989] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.369178] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.369346] env[65758]: INFO nova.compute.manager [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 739.370278] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 739.370278] env[65758]: DEBUG nova.compute.manager [-] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 739.370278] env[65758]: DEBUG nova.network.neutron [-] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 739.370278] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 739.370913] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 739.371220] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 739.378675] env[65758]: WARNING neutronclient.v2_0.client [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 739.379242] env[65758]: WARNING openstack [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 739.379603] env[65758]: WARNING openstack [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 739.508930] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.887s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.514049] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.090s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.514424] env[65758]: DEBUG nova.objects.instance [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 
tempest-ServerTagsTestJSON-1164676599-project-member] Lazy-loading 'resources' on Instance uuid e48a075b-41b3-4612-bd5f-0a158d707a2f {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 739.521916] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660254, 'name': Rename_Task, 'duration_secs': 0.206836} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.522240] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 739.522505] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c22a6359-8e16-49d1-98e1-526e342c6f93 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.528078] env[65758]: INFO nova.scheduler.client.report [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleted allocations for instance 492d1063-8eaf-4207-8d65-341fbc0b6c39 [ 739.533507] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 739.533507] env[65758]: value = "task-4660256" [ 739.533507] env[65758]: _type = "Task" [ 739.533507] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.546285] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.586691] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660255, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.656141] env[65758]: DEBUG nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 739.688952] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 739.689230] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 739.689407] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 739.689666] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 739.689822] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 739.689969] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 739.690197] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 739.690362] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 739.690545] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] 
Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 739.690712] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 739.690916] env[65758]: DEBUG nova.virt.hardware [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 739.691840] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7165558-47a2-4973-995b-f704505e1d2c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.701853] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607e2823-2895-43db-b172-927b74559b30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.912021] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 740.037513] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a27c9e49-45da-40da-a532-74f87d4d4a8a tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "492d1063-8eaf-4207-8d65-341fbc0b6c39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.404s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.054217] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660256, 'name': PowerOnVM_Task} progress is 78%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.088152] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660255, 'name': CreateVM_Task, 'duration_secs': 0.594106} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.090407] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 740.091890] env[65758]: WARNING neutronclient.v2_0.client [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 740.091890] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.091890] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.093522] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 740.093522] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26ff4376-0e62-4da1-9f74-efbb04af440d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.099713] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 740.099713] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c61470-d611-7a59-9337-94ea8b8ead80" [ 740.099713] env[65758]: _type = "Task" [ 740.099713] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.110258] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c61470-d611-7a59-9337-94ea8b8ead80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.238573] env[65758]: DEBUG nova.network.neutron [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Successfully updated port: 12b480c3-4c9e-4da0-9f51-8b29cd9f54ce {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 740.550183] env[65758]: DEBUG oslo_vmware.api [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660256, 'name': PowerOnVM_Task, 'duration_secs': 0.94025} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.550486] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 740.550701] env[65758]: INFO nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Took 8.39 seconds to spawn the instance on the hypervisor. [ 740.551372] env[65758]: DEBUG nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 740.552354] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af72c030-6012-4d60-a8be-c53923b2163e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.621822] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c61470-d611-7a59-9337-94ea8b8ead80, 'name': SearchDatastore_Task, 'duration_secs': 0.012526} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.627605] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.629340] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 740.629340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.629340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.629340] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 740.629340] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c24bd3ee-a6d3-43ed-8fc5-62b06e76b984 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.643224] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 740.643479] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 740.644265] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9f5767c-90d5-4f15-8663-95fc48d52922 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.653059] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 740.653059] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524b1410-3f4d-5e60-7139-07652001c04f" [ 740.653059] env[65758]: _type = "Task" [ 740.653059] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.662934] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524b1410-3f4d-5e60-7139-07652001c04f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.683742] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bf4e1d-4c64-4717-bd5c-3e369fd1c671 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.700619] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c457674f-5c94-4797-bbfa-1919f1da56a8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.741643] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "refresh_cache-a0a9d947-f2ad-4a35-b336-1486c9a76b06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.741868] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "refresh_cache-a0a9d947-f2ad-4a35-b336-1486c9a76b06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.742058] env[65758]: DEBUG nova.network.neutron [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 740.747722] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a28dc96-b4cc-4330-bc2b-2ceae92e05a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.754944] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e372023-1ea5-40d0-a0ea-4478582cb3ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.772020] env[65758]: DEBUG 
nova.compute.provider_tree [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.924525] env[65758]: WARNING neutronclient.v2_0.client [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 740.925487] env[65758]: WARNING openstack [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 740.925865] env[65758]: WARNING openstack [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 741.050390] env[65758]: DEBUG nova.network.neutron [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updated VIF entry in instance network info cache for port fb1e683c-095a-4512-a0a0-ec651a275ab8. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 741.050390] env[65758]: DEBUG nova.network.neutron [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updating instance_info_cache with network_info: [{"id": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "address": "fa:16:3e:f7:f5:02", "network": {"id": "1b53f74f-4a09-45cf-a192-afdf7009dfc5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-171342154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06aa7ad9cf4f4f528687bbd3e6d12b0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb1e683c-09", "ovs_interfaceid": "fb1e683c-095a-4512-a0a0-ec651a275ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 741.072650] env[65758]: INFO nova.compute.manager [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 
tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Took 42.97 seconds to build instance. [ 741.165261] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524b1410-3f4d-5e60-7139-07652001c04f, 'name': SearchDatastore_Task, 'duration_secs': 0.013179} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.166313] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42afe3fa-fdb3-4316-a045-ca83499c3b4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.172633] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 741.172633] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f598c8-6c29-e8b4-e679-652470b384a3" [ 741.172633] env[65758]: _type = "Task" [ 741.172633] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.182745] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f598c8-6c29-e8b4-e679-652470b384a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.248559] env[65758]: WARNING openstack [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 741.249016] env[65758]: WARNING openstack [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 741.276714] env[65758]: DEBUG nova.scheduler.client.report [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 741.293066] env[65758]: DEBUG 
nova.network.neutron [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 741.297186] env[65758]: DEBUG nova.network.neutron [-] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 741.385581] env[65758]: WARNING neutronclient.v2_0.client [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 741.386513] env[65758]: WARNING openstack [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 741.387039] env[65758]: WARNING openstack [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 741.482732] env[65758]: DEBUG nova.network.neutron [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Updating instance_info_cache with network_info: [{"id": "12b480c3-4c9e-4da0-9f51-8b29cd9f54ce", "address": "fa:16:3e:99:36:18", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12b480c3-4c", "ovs_interfaceid": "12b480c3-4c9e-4da0-9f51-8b29cd9f54ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 741.553919] env[65758]: DEBUG oslo_concurrency.lockutils [req-5006b83e-9138-4904-919c-a129817642a0 req-ba3839df-9d99-40e9-b8d0-84cf627e16c1 service nova] Releasing lock "refresh_cache-adc1b956-1b5a-4272-b0ff-95a565e9c45c" 
{{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.575530] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bedba95c-cc10-4dd2-bf1d-d756f3606aca tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "cca3e019-8e82-4473-8609-291703762a6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.224s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.663256] env[65758]: DEBUG nova.compute.manager [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Received event network-vif-plugged-12b480c3-4c9e-4da0-9f51-8b29cd9f54ce {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 741.663363] env[65758]: DEBUG oslo_concurrency.lockutils [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Acquiring lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.663522] env[65758]: DEBUG oslo_concurrency.lockutils [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.663714] env[65758]: DEBUG oslo_concurrency.lockutils [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.663845] env[65758]: DEBUG nova.compute.manager [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] No waiting events found dispatching network-vif-plugged-12b480c3-4c9e-4da0-9f51-8b29cd9f54ce {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 741.664060] env[65758]: WARNING nova.compute.manager [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Received unexpected event network-vif-plugged-12b480c3-4c9e-4da0-9f51-8b29cd9f54ce for instance with vm_state building and task_state spawning. [ 741.664204] env[65758]: DEBUG nova.compute.manager [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Received event network-changed-12b480c3-4c9e-4da0-9f51-8b29cd9f54ce {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 741.664352] env[65758]: DEBUG nova.compute.manager [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Refreshing instance network info cache due to event network-changed-12b480c3-4c9e-4da0-9f51-8b29cd9f54ce. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 741.664514] env[65758]: DEBUG oslo_concurrency.lockutils [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Acquiring lock "refresh_cache-a0a9d947-f2ad-4a35-b336-1486c9a76b06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.687131] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f598c8-6c29-e8b4-e679-652470b384a3, 'name': SearchDatastore_Task, 'duration_secs': 0.020837} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.687585] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.687852] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1/4fda2aa0-451c-4c0f-a03a-19ea8b083ba1.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 741.688140] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c80538c5-9f72-44eb-9495-c2486200c85b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.697335] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 741.697335] env[65758]: value = "task-4660257" [ 741.697335] env[65758]: _type = "Task" [ 741.697335] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.708352] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660257, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.782833] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.269s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.787184] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.929s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.787528] env[65758]: DEBUG nova.objects.instance [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lazy-loading 'resources' on Instance uuid b6b673e9-0ae1-4c7c-be53-e83641063cf8 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 741.800089] env[65758]: INFO nova.compute.manager [-] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Took 2.43 seconds to deallocate network for instance. [ 741.822707] env[65758]: INFO nova.scheduler.client.report [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Deleted allocations for instance e48a075b-41b3-4612-bd5f-0a158d707a2f [ 741.989059] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "refresh_cache-a0a9d947-f2ad-4a35-b336-1486c9a76b06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.989224] env[65758]: DEBUG nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Instance network_info: |[{"id": "12b480c3-4c9e-4da0-9f51-8b29cd9f54ce", "address": "fa:16:3e:99:36:18", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12b480c3-4c", "ovs_interfaceid": "12b480c3-4c9e-4da0-9f51-8b29cd9f54ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2035}} [ 741.989499] env[65758]: DEBUG oslo_concurrency.lockutils [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Acquired lock "refresh_cache-a0a9d947-f2ad-4a35-b336-1486c9a76b06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.989814] env[65758]: DEBUG nova.network.neutron [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Refreshing network info cache for port 12b480c3-4c9e-4da0-9f51-8b29cd9f54ce {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 741.991169] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:36:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12b480c3-4c9e-4da0-9f51-8b29cd9f54ce', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.999889] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating folder: Project (3e3a324879d646699f950687546ea861). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.001394] env[65758]: WARNING neutronclient.v2_0.client [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 742.002228] env[65758]: WARNING openstack [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 742.002691] env[65758]: WARNING openstack [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 742.010362] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e9613b9-0efd-41b7-8e68-3b5f3e222b76 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.026376] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Created folder: Project (3e3a324879d646699f950687546ea861) in parent group-v909763. 
[ 742.026595] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating folder: Instances. Parent ref: group-v909873. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.026992] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a62c1d39-17dd-46fd-9f6e-e2fa4a0ef850 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.042663] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Created folder: Instances in parent group-v909873. [ 742.043354] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 742.043736] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 742.044162] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f94f385-1f55-4b9a-a23f-d9ec65b9ac77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.075486] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.075486] env[65758]: value = "task-4660260" [ 742.075486] env[65758]: _type = "Task" [ 742.075486] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.080175] env[65758]: DEBUG nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 742.089776] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660260, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.219383] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660257, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.277379] env[65758]: WARNING neutronclient.v2_0.client [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 742.278480] env[65758]: WARNING openstack [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 742.279176] env[65758]: WARNING openstack [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 742.308912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.337569] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e72009b6-2e7d-45aa-81c4-e18ce2789462 tempest-ServerTagsTestJSON-1164676599 tempest-ServerTagsTestJSON-1164676599-project-member] Lock "e48a075b-41b3-4612-bd5f-0a158d707a2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.744s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.472537] env[65758]: DEBUG nova.network.neutron [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Updated VIF entry in instance network info cache for port 12b480c3-4c9e-4da0-9f51-8b29cd9f54ce. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 742.473112] env[65758]: DEBUG nova.network.neutron [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Updating instance_info_cache with network_info: [{"id": "12b480c3-4c9e-4da0-9f51-8b29cd9f54ce", "address": "fa:16:3e:99:36:18", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12b480c3-4c", "ovs_interfaceid": "12b480c3-4c9e-4da0-9f51-8b29cd9f54ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 742.593337] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660260, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.620711] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.712817] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660257, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.756277} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.713617] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1/4fda2aa0-451c-4c0f-a03a-19ea8b083ba1.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 742.714426] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 742.714426] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30c4ab08-187d-4233-81f1-c6fffb016954 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.725403] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 742.725403] env[65758]: value = "task-4660261" [ 742.725403] env[65758]: _type = "Task" [ 742.725403] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.737077] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660261, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.956880] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b8144f-d959-4430-8be0-4f2c28ace6c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.965261] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0797be0f-facb-4453-8061-da188fb5b29c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.012960] env[65758]: DEBUG oslo_concurrency.lockutils [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] Releasing lock "refresh_cache-a0a9d947-f2ad-4a35-b336-1486c9a76b06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.013531] env[65758]: DEBUG nova.compute.manager [req-06c82cfd-17c3-4d7d-ba08-bf20996d3bfa req-b070b7f7-6334-42c4-91b5-8109536d18ea service nova] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Received event network-vif-deleted-fb1e683c-095a-4512-a0a0-ec651a275ab8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 743.015137] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6501d7-1a8c-40b7-8ab2-ff55bc4f126d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.024566] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebcf646-b391-4068-acff-dc8d03427d88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.040565] env[65758]: DEBUG nova.compute.provider_tree [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.088547] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660260, 'name': CreateVM_Task, 'duration_secs': 0.671496} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.088830] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 743.089249] env[65758]: WARNING neutronclient.v2_0.client [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 743.089635] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.089798] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.090170] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 743.090366] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa0b1cf7-165b-412a-92f1-8bbc9209a9c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.095603] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 743.095603] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e93bdc-1ca6-d4e3-c9c9-2ba549ea2009" [ 743.095603] env[65758]: _type = "Task" [ 743.095603] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.104943] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e93bdc-1ca6-d4e3-c9c9-2ba549ea2009, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.236775] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660261, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076859} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.237412] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 743.238364] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f8378d-e041-49aa-b9cd-5f8e132ee4af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.266240] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1/4fda2aa0-451c-4c0f-a03a-19ea8b083ba1.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 743.267253] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-defda4c6-d003-4c9e-9f31-d388d81e3bc6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.293362] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 743.293362] env[65758]: value = "task-4660262" [ 743.293362] env[65758]: _type = "Task" [ 743.293362] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.305330] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660262, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.544228] env[65758]: DEBUG nova.scheduler.client.report [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.608915] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e93bdc-1ca6-d4e3-c9c9-2ba549ea2009, 'name': SearchDatastore_Task, 'duration_secs': 0.010632} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.609186] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.609799] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.609903] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.610018] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.610279] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.610540] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5dd5f2d-6751-4d62-9f91-4d77bbcd9d58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.626142] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.626367] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.627266] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30d9e797-ddda-489e-aab4-05efcbf07dcd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.635753] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 743.635753] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525cfa5e-7443-f41e-8c9e-702a3866b25d" [ 743.635753] env[65758]: _type = "Task" [ 743.635753] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.645438] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525cfa5e-7443-f41e-8c9e-702a3866b25d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.808864] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660262, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.050698] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.264s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.053924] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.115s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.053924] env[65758]: DEBUG nova.objects.instance [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lazy-loading 'resources' on Instance uuid e60efbcd-1c4e-40a1-8bc1-893daa511073 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 744.093426] env[65758]: INFO nova.scheduler.client.report [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Deleted allocations for instance b6b673e9-0ae1-4c7c-be53-e83641063cf8 [ 744.150778] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525cfa5e-7443-f41e-8c9e-702a3866b25d, 'name': SearchDatastore_Task, 'duration_secs': 0.010757} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.151153] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae898c63-74ce-4ad4-b794-131fcea268a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.158203] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 744.158203] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52894145-2ad4-0559-0ead-cb27b3e71e22" [ 744.158203] env[65758]: _type = "Task" [ 744.158203] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.172145] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52894145-2ad4-0559-0ead-cb27b3e71e22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.306277] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660262, 'name': ReconfigVM_Task, 'duration_secs': 0.580359} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.306950] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1/4fda2aa0-451c-4c0f-a03a-19ea8b083ba1.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 744.307482] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d737f26-c9f2-4d9c-921e-d890c650f51e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.316115] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 744.316115] env[65758]: value = "task-4660263" [ 744.316115] env[65758]: _type = "Task" [ 744.316115] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.325632] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660263, 'name': Rename_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.604578] env[65758]: DEBUG oslo_concurrency.lockutils [None req-169f9016-772d-4c89-a033-8801a6cb4697 tempest-ServerAddressesTestJSON-126388572 tempest-ServerAddressesTestJSON-126388572-project-member] Lock "b6b673e9-0ae1-4c7c-be53-e83641063cf8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.380s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.672607] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52894145-2ad4-0559-0ead-cb27b3e71e22, 'name': SearchDatastore_Task, 'duration_secs': 0.030151} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.675852] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.675852] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a0a9d947-f2ad-4a35-b336-1486c9a76b06/a0a9d947-f2ad-4a35-b336-1486c9a76b06.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.676244] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d603ba19-62b5-404a-9d77-32fd7408e2de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.684733] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 744.684733] env[65758]: value = "task-4660264" [ 744.684733] env[65758]: _type = "Task" [ 744.684733] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.694497] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660264, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.831660] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660263, 'name': Rename_Task, 'duration_secs': 0.288817} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.834805] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 744.835255] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbec8aa9-9c3a-42fc-9614-73b54505e5f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.843649] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 744.843649] env[65758]: value = "task-4660265" [ 744.843649] env[65758]: _type = "Task" [ 744.843649] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.858866] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660265, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.201985] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660264, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.206511] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b511a64c-b521-40db-9dfd-dbb491f948bd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.222325] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a71ec27-d2df-47f8-8109-343e054f19b4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.227351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "596a5005-3607-44a2-9c0e-f1a56865011c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.227351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "596a5005-3607-44a2-9c0e-f1a56865011c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.256576] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c42a88-392c-4f0e-94a3-0ddebbe001d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.265943] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207fbf32-2e7b-4fd5-bebe-ce089d4f2965 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.281700] env[65758]: DEBUG nova.compute.provider_tree [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.289945] env[65758]: INFO nova.compute.manager [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Rescuing [ 745.290130] env[65758]: DEBUG oslo_concurrency.lockutils [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.290913] env[65758]: DEBUG oslo_concurrency.lockutils [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.290913] env[65758]: DEBUG nova.network.neutron [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 745.355938] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660265, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.503965] env[65758]: DEBUG nova.compute.manager [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 745.505046] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b710d4f2-2830-47d3-9b46-c55061b13cb3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.704686] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660264, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53636} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.705579] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a0a9d947-f2ad-4a35-b336-1486c9a76b06/a0a9d947-f2ad-4a35-b336-1486c9a76b06.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 745.705579] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.705579] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68d992c1-a6a9-4ad1-9b9a-147d3e2a8adf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.715613] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 745.715613] env[65758]: value = "task-4660266" [ 745.715613] env[65758]: _type = "Task" [ 745.715613] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.726845] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660266, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.787586] env[65758]: DEBUG nova.scheduler.client.report [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.792817] env[65758]: WARNING neutronclient.v2_0.client [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 745.794901] env[65758]: WARNING openstack [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 745.795871] env[65758]: WARNING openstack [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 745.859570] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660265, 'name': PowerOnVM_Task, 'duration_secs': 0.963339} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.860088] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 745.860194] env[65758]: DEBUG nova.compute.manager [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 745.860999] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ae47a3-52cd-4f73-8f88-faa3d4b69a2d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.022881] env[65758]: INFO nova.compute.manager [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] instance snapshotting [ 746.028149] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f90d7fb-c494-4d44-bbf2-df9201dbc4aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.050868] env[65758]: WARNING neutronclient.v2_0.client [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 746.051419] env[65758]: WARNING openstack [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 746.052508] env[65758]: WARNING openstack [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 746.061292] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af41dc5-aee9-42e1-803e-1dce13c688d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.175471] env[65758]: DEBUG nova.network.neutron [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Updating instance_info_cache with network_info: [{"id": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "address": "fa:16:3e:01:ce:ae", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09744327-3d", "ovs_interfaceid": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 746.226436] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076829} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.226707] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 746.227649] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce3419b-b0cf-4641-8122-219164f0d4e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.256547] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] a0a9d947-f2ad-4a35-b336-1486c9a76b06/a0a9d947-f2ad-4a35-b336-1486c9a76b06.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.257298] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb3d1780-36aa-4316-894c-ccdc390fb82c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.279686] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 746.279686] env[65758]: value = "task-4660267" [ 746.279686] env[65758]: _type = "Task" [ 746.279686] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.290059] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660267, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.290863] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.238s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.293671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.349s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.295055] env[65758]: INFO nova.compute.claims [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.341774] env[65758]: INFO nova.scheduler.client.report [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Deleted allocations for instance e60efbcd-1c4e-40a1-8bc1-893daa511073 [ 746.374048] env[65758]: INFO nova.compute.manager [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] bringing vm to original state: 'stopped' [ 746.574814] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 746.575190] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-33486606-c0cf-4ca5-935d-e1c6a5c0487c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.584543] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 746.584543] env[65758]: value = "task-4660268" [ 746.584543] env[65758]: _type = "Task" [ 746.584543] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.595694] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660268, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.679023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.791051] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660267, 'name': ReconfigVM_Task, 'duration_secs': 0.330991} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.791377] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Reconfigured VM instance instance-0000002a to attach disk [datastore1] a0a9d947-f2ad-4a35-b336-1486c9a76b06/a0a9d947-f2ad-4a35-b336-1486c9a76b06.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.792076] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-421aa954-728a-4d51-bb17-998aa8f7aceb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.799846] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 746.799846] env[65758]: value = "task-4660269" [ 746.799846] env[65758]: _type = "Task" [ 746.799846] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.815124] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660269, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.855108] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0e83fe5d-8d20-4959-a346-3ac602b7b40d tempest-VolumesAssistedSnapshotsTest-1325605492 tempest-VolumesAssistedSnapshotsTest-1325605492-project-member] Lock "e60efbcd-1c4e-40a1-8bc1-893daa511073" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.035s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.097106] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660268, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.197984] env[65758]: INFO nova.compute.manager [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Rebuilding instance [ 747.252415] env[65758]: DEBUG nova.compute.manager [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 747.253486] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10dd153-c380-4cc4-9f2e-ce309b39b683 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.316981] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660269, 'name': Rename_Task, 'duration_secs': 0.152923} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.316981] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 747.316981] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac4d6f7d-67ff-4b42-8b9b-cc59e9a56425 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.326909] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 747.326909] env[65758]: value = "task-4660270" [ 747.326909] env[65758]: _type = "Task" [ 747.326909] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.339302] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660270, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.387596] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.388106] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.388106] env[65758]: DEBUG nova.compute.manager [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 747.389501] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba243db8-4ffb-4d62-a621-85b731daf3a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.397559] env[65758]: DEBUG nova.compute.manager [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 747.600275] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660268, 'name': CreateSnapshot_Task, 'duration_secs': 0.558759} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.600275] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 747.604023] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4f3a16-1a1c-41d4-a481-5d2162b8c12c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.844857] env[65758]: DEBUG oslo_vmware.api [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660270, 'name': PowerOnVM_Task, 'duration_secs': 0.48632} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.845279] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.845552] env[65758]: INFO nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Took 8.19 seconds to spawn the instance on the hypervisor. [ 747.846363] env[65758]: DEBUG nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 747.847061] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d309c63-719f-405a-929a-d4b19bec5ee7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.909539] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 747.910503] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105f2769-3964-4d13-a402-012f1bd66980 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.913611] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cca255f7-13ea-4231-be96-92797b4579d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.923801] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16618b95-684b-423c-bef0-a8e1ab429389 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.928471] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 747.928471] env[65758]: value = "task-4660271" [ 747.928471] env[65758]: _type = "Task" [ 747.928471] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.962667] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022f187c-f20b-4939-b56f-11a9263efe1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.971204] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660271, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.977893] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5c7778-4a8a-4972-be6f-31b89fb2aed6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.993909] env[65758]: DEBUG nova.compute.provider_tree [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.123455] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 748.124580] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-06e92657-f3a7-44c1-9bd1-053c6ed695cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.136594] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 748.136594] env[65758]: value = "task-4660272" [ 748.136594] env[65758]: _type = "Task" [ 748.136594] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.146476] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660272, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.233783] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.233783] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e9afcc9-59e0-4f78-9c22-865ea67fe6ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.242867] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 748.242867] env[65758]: value = "task-4660273" [ 748.242867] env[65758]: _type = "Task" [ 748.242867] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.253944] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.277021] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.277021] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88364353-5702-40d4-b4e6-177aa685b913 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.283618] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 748.283618] env[65758]: value = "task-4660274" [ 748.283618] env[65758]: _type = "Task" [ 748.283618] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.296332] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660274, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.372210] env[65758]: INFO nova.compute.manager [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Took 42.50 seconds to build instance. [ 748.448249] env[65758]: DEBUG oslo_vmware.api [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660271, 'name': PowerOffVM_Task, 'duration_secs': 0.280188} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.448797] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.449033] env[65758]: DEBUG nova.compute.manager [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 748.450041] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff666864-612b-4071-b581-563e00e960bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.474295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "148eddf4-4c01-47bc-be81-451ca57e7347" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.474295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "148eddf4-4c01-47bc-be81-451ca57e7347" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.474295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "148eddf4-4c01-47bc-be81-451ca57e7347-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.474295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "148eddf4-4c01-47bc-be81-451ca57e7347-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.474295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "148eddf4-4c01-47bc-be81-451ca57e7347-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.478317] env[65758]: INFO nova.compute.manager [None req-10e3f1d3-0663-47b3-8202-898158757939 
tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Terminating instance [ 748.497339] env[65758]: DEBUG nova.scheduler.client.report [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 748.648650] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660272, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.755530] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660273, 'name': PowerOffVM_Task, 'duration_secs': 0.216636} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.757051] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.757051] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b0f9c7-9f51-4a03-ba45-2d7624f1cc7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.778054] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c79c0b-53a6-4e82-8a4c-5c3da3194e7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.801044] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660274, 'name': PowerOffVM_Task, 'duration_secs': 0.284051} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.801391] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.801666] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 748.802508] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25c6cb2-5712-4a89-bd20-484b7ae630f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.812522] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 748.812522] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07c281d1-5d00-4ef0-973f-e1d56cd51f12 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.816659] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.816962] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fafa06a3-f4e0-49c5-b609-bc9f83c54c13 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.824233] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 748.824233] env[65758]: value = "task-4660276" [ 748.824233] env[65758]: _type = "Task" [ 748.824233] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.833825] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660276, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.876918] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e83201c5-9ddb-4c0d-b1c3-e33e0f5a796b tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.059s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.900956] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 748.901214] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 748.901386] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleting the datastore file [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.901714] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a138d19-e184-4561-acb0-6695028b3f0c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.910739] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 748.910739] env[65758]: value = "task-4660277" [ 748.910739] env[65758]: _type = "Task" [ 748.910739] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.920373] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660277, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.942545] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "03073968-e679-4ce5-9f84-c4765217b308" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.942901] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "03073968-e679-4ce5-9f84-c4765217b308" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.943052] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "03073968-e679-4ce5-9f84-c4765217b308-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.943328] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "03073968-e679-4ce5-9f84-c4765217b308-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.943467] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "03073968-e679-4ce5-9f84-c4765217b308-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.945839] env[65758]: INFO nova.compute.manager [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Terminating instance [ 748.968311] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.580s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.986278] env[65758]: DEBUG nova.compute.manager [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 748.986599] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 748.988590] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647be5a0-9d3b-4b77-953c-97d52291ce50 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.000128] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.000482] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77ea113c-1a90-4fef-8916-801e45c4e3a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.003129] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.004209] env[65758]: DEBUG nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 749.007601] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.005s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.012020] env[65758]: INFO nova.compute.claims [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.018819] env[65758]: DEBUG oslo_vmware.api [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 749.018819] env[65758]: value = "task-4660278" [ 749.018819] env[65758]: _type = "Task" [ 749.018819] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.036283] env[65758]: DEBUG oslo_vmware.api [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.147758] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "d60aaa5c-913f-4550-a4d5-ab994048da9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.148881] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "d60aaa5c-913f-4550-a4d5-ab994048da9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.154123] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660272, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.336635] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 749.337057] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.337481] env[65758]: DEBUG oslo_concurrency.lockutils [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.337481] env[65758]: DEBUG oslo_concurrency.lockutils [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.337632] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.337944] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff565cbf-d97a-424e-93af-19a3e3a91c6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.349386] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.349603] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.350313] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ac3e36-4569-4281-9629-850ebeeb474d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.357316] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 749.357316] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527ebe57-5549-3b5f-289b-e646d27d6bdd" [ 749.357316] env[65758]: _type = "Task" [ 749.357316] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.368267] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527ebe57-5549-3b5f-289b-e646d27d6bdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.379385] env[65758]: DEBUG nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 749.422634] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.298353} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.422634] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 749.422634] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 749.422912] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 749.450400] env[65758]: DEBUG nova.compute.manager [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 749.450672] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.452686] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a21ad2e-0768-4fd1-9220-67badc21010d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.464035] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.464398] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43b862b0-1c3d-4f95-a348-c76baceef679 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.476500] env[65758]: DEBUG oslo_vmware.api [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 749.476500] env[65758]: value = "task-4660279" [ 749.476500] env[65758]: _type = "Task" [ 749.476500] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.482409] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.490994] env[65758]: DEBUG oslo_vmware.api [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.516208] env[65758]: DEBUG nova.compute.utils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 749.517865] env[65758]: DEBUG nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 749.518111] env[65758]: DEBUG nova.network.neutron [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 749.518470] env[65758]: WARNING neutronclient.v2_0.client [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 749.518956] env[65758]: WARNING neutronclient.v2_0.client [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 749.519460] env[65758]: WARNING openstack [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 749.519871] env[65758]: WARNING openstack [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 749.537632] env[65758]: DEBUG oslo_vmware.api [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660278, 'name': PowerOffVM_Task, 'duration_secs': 0.425576} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.537932] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.538063] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.538344] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0676be9-e323-4155-9f36-00d0fb3e6888 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.616775] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 749.617025] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 749.617221] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleting the datastore file [datastore1] 148eddf4-4c01-47bc-be81-451ca57e7347 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.617518] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fc47126-e0ec-4616-b726-fc3311e66c86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.626256] env[65758]: DEBUG oslo_vmware.api [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 749.626256] env[65758]: value = "task-4660281" [ 749.626256] env[65758]: _type = "Task" [ 749.626256] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.636320] env[65758]: DEBUG oslo_vmware.api [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660281, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.649716] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660272, 'name': CloneVM_Task} progress is 95%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.873870] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527ebe57-5549-3b5f-289b-e646d27d6bdd, 'name': SearchDatastore_Task, 'duration_secs': 0.013106} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.874930] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d7a01a6-c4af-4e23-950b-678a8049239a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.884242] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 749.884242] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5200a71c-4a1f-5999-d517-4c023d1db70f" [ 749.884242] env[65758]: _type = "Task" [ 749.884242] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.904776] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5200a71c-4a1f-5999-d517-4c023d1db70f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.923127] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.947480] env[65758]: DEBUG nova.policy [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3255ff6ca6744f05b538826519856544', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a4045fe12c0401fbb68bff8def4e9ea', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 749.989964] env[65758]: DEBUG oslo_vmware.api [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660279, 'name': PowerOffVM_Task, 'duration_secs': 0.2972} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.991195] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.991426] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.991705] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84be5a29-979a-4ec9-88df-5f9bff3c04b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.028335] env[65758]: DEBUG nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 750.062877] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.063307] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.063476] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleting the datastore file [datastore1] 03073968-e679-4ce5-9f84-c4765217b308 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.063744] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b3fba54-a92c-4400-a71e-9fe71d5eba28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.074381] env[65758]: DEBUG oslo_vmware.api [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for the task: (returnval){ [ 750.074381] env[65758]: value = "task-4660283" [ 750.074381] env[65758]: _type = "Task" [ 750.074381] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.085654] env[65758]: DEBUG oslo_vmware.api [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660283, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.141352] env[65758]: DEBUG oslo_vmware.api [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242781} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.144720] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.144897] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.145058] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.145891] env[65758]: INFO nova.compute.manager [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Took 1.16 seconds to destroy the instance on the hypervisor. [ 750.145891] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 750.148436] env[65758]: DEBUG nova.compute.manager [-] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 750.148573] env[65758]: DEBUG nova.network.neutron [-] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 750.148786] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 750.149308] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 750.149563] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 750.164756] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660272, 'name': CloneVM_Task, 'duration_secs': 1.705948} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.164756] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Created linked-clone VM from snapshot [ 750.164756] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ca1ff3-1321-4a0e-96ad-7ff2cc613ff5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.173054] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Uploading image 1ada059b-91d8-44e9-a5b4-6e50a611cedc {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 750.186875] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 750.187194] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-300d45ca-bfd4-4a69-b829-3ad194645140 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.198194] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 750.198194] env[65758]: value = "task-4660284" [ 750.198194] env[65758]: _type = "Task" [ 750.198194] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.210386] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660284, 'name': Destroy_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.370387] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 750.400572] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5200a71c-4a1f-5999-d517-4c023d1db70f, 'name': SearchDatastore_Task, 'duration_secs': 0.024071} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.404353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.404758] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. {{(pid=65758) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 750.406166] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1aa67c5-2661-47d5-852f-e8b466bd9eb2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.416736] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 750.416736] env[65758]: value = "task-4660285" [ 750.416736] env[65758]: _type = "Task" [ 750.416736] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.428535] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660285, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.454030] env[65758]: DEBUG nova.network.neutron [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Successfully created port: add59776-3d93-4cc4-8b79-045e84a073ff {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 750.466116] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 750.466400] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 750.466556] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 750.466746] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 750.466926] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 750.467171] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 750.467424] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 
750.467595] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 750.467761] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 750.467917] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 750.468105] env[65758]: DEBUG nova.virt.hardware [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 750.469010] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51fdd8eb-d3dd-47d8-aafd-ece7139b2f0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.482631] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2476cf-e297-434f-b00f-139787019ab3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.505140] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:35:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df4cf195-46a9-4de5-ae34-2363de4377f0', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.515550] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 750.519724] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.519978] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a278dcb9-e2a6-454b-83bf-e7f811671acf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.549790] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.549790] env[65758]: value = "task-4660286" [ 750.549790] env[65758]: _type = "Task" [ 750.549790] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.564877] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660286, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.585040] env[65758]: DEBUG oslo_vmware.api [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Task: {'id': task-4660283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.336902} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.585993] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.585993] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.585993] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.585993] env[65758]: INFO nova.compute.manager [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Took 1.14 seconds to destroy the instance on the hypervisor. [ 750.586254] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 750.586516] env[65758]: DEBUG nova.compute.manager [-] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 750.586660] env[65758]: DEBUG nova.network.neutron [-] [instance: 03073968-e679-4ce5-9f84-c4765217b308] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 750.586962] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 750.587620] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 750.588176] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 750.695424] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4d54f1-196f-4a9e-ad2a-e11a3e19c398 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.713849] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c16c93-650c-4127-b995-d575ba7b3616 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.717722] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660284, 'name': Destroy_Task, 'duration_secs': 0.477177} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.718148] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Destroyed the VM [ 750.718377] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 750.719113] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9322b8ad-d04c-473a-b699-a468074664d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.751395] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a779e5-4925-44ed-a46f-213378f71a16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.754516] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 750.754516] env[65758]: value = "task-4660287" [ 750.754516] env[65758]: _type = "Task" [ 750.754516] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.762900] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d659716-73b7-41c2-8462-a317d4ac1181 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.771145] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660287, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.784282] env[65758]: DEBUG nova.compute.provider_tree [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.930812] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660285, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.046190] env[65758]: DEBUG nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 751.061348] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660286, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.081247] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 751.081581] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.081830] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 751.082102] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.082309] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 751.082512] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 751.082799] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 751.083040] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b 
tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 751.083301] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 751.083480] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 751.083706] env[65758]: DEBUG nova.virt.hardware [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 751.084968] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bbd4a5-20b4-436f-91e1-170543c4cf75 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.099632] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b105c1-27c3-463a-b575-f2c12fff915a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.255081] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 751.267861] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660287, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.288084] env[65758]: DEBUG nova.scheduler.client.report [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.303674] env[65758]: DEBUG nova.network.neutron [-] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 751.431036] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636774} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.431036] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. [ 751.431422] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955b65af-e548-4c15-b664-bc77e347c993 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.457216] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.457912] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21dd37e6-261f-45b1-865e-964fca31d115 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.477499] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 751.477499] env[65758]: value = "task-4660288" [ 751.477499] env[65758]: _type = "Task" [ 751.477499] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.486830] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660288, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.561646] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660286, 'name': CreateVM_Task, 'duration_secs': 0.731065} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.561646] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.562202] env[65758]: WARNING neutronclient.v2_0.client [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 751.562535] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.562683] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.563074] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 751.563363] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5cb6943-b58d-4a03-8b54-67fe5e476098 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.569339] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 751.569339] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52793ca1-5171-3414-3b88-8308ab759624" [ 751.569339] env[65758]: _type = "Task" [ 751.569339] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.578703] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52793ca1-5171-3414-3b88-8308ab759624, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.769344] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660287, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.793820] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.794478] env[65758]: DEBUG nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 751.797374] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.658s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.798919] env[65758]: INFO nova.compute.claims [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.810686] env[65758]: INFO nova.compute.manager [-] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Took 1.66 seconds to deallocate network for instance. [ 751.993426] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660288, 'name': ReconfigVM_Task, 'duration_secs': 0.359318} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.993733] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Reconfigured VM instance instance-00000021 to attach disk [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.995308] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e859e10-dd3c-4a6f-bcbb-0609104749af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.033647] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f63aea47-00b5-4575-b637-2a70305b86e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.053549] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 752.053549] env[65758]: value = "task-4660289" [ 752.053549] env[65758]: _type = "Task" [ 752.053549] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.064984] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660289, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.081063] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52793ca1-5171-3414-3b88-8308ab759624, 'name': SearchDatastore_Task, 'duration_secs': 0.016956} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.081637] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.081637] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.081838] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.081989] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.082181] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.082520] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ab629a3-f45f-4910-a3a0-ecf5370f6a8d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.094645] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.094645] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.096776] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a2d3179-0a31-431c-8ae8-a3ad099880c5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.104571] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 752.104571] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521fa29b-ea2a-2979-9f86-1b5370a8f2af" [ 752.104571] env[65758]: _type = "Task" [ 752.104571] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.112469] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521fa29b-ea2a-2979-9f86-1b5370a8f2af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.268257] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660287, 'name': RemoveSnapshot_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.311426] env[65758]: DEBUG nova.compute.utils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 752.311426] env[65758]: DEBUG nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 752.311426] env[65758]: DEBUG nova.network.neutron [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 752.311426] env[65758]: WARNING neutronclient.v2_0.client [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 752.311898] env[65758]: WARNING neutronclient.v2_0.client [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
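
The lock acquire/release sequence above around "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" is the image-cache serialization step of the spawn: the request takes a lock named after the cached VMDK path, runs SearchDatastore_Task to see whether the cached copy already exists, downloads it only when missing, and only then copies it into the instance directory, so concurrent spawns on the same host share one download. A minimal sketch of that locking pattern with oslo.concurrency follows; the helper callables (image_present, fetch_image) are hypothetical placeholders standing in for the driver's own datastore calls, not Nova's actual API.

```python
# Minimal sketch of the image-cache serialization visible in the log above.
# The lock name mirrors the datastore path of the cached image; the helpers
# image_present() and fetch_image() are illustrative placeholders only.
from oslo_concurrency import lockutils

CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
              "75a6399b-5100-4c51-b5cf-162bd505a28f/"
              "75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk")


def ensure_cached_image(image_present, fetch_image):
    # Serialize on the cached image path: only one request downloads the
    # image, everyone else blocks here and then reuses the cached copy.
    with lockutils.lock(CACHE_VMDK, external=True):
        if not image_present(CACHE_VMDK):
            fetch_image(CACHE_VMDK)
    return CACHE_VMDK
```

In the log this shows up as the "Acquiring lock … Acquired lock … Releasing lock" triplets around the SearchDatastore_Task calls, followed by the CopyVirtualDisk_Task that materializes the instance's root disk from the cached VMDK.
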
[ 752.312377] env[65758]: WARNING openstack [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 752.312727] env[65758]: WARNING openstack [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 752.322044] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.416471] env[65758]: DEBUG nova.network.neutron [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Successfully updated port: add59776-3d93-4cc4-8b79-045e84a073ff {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 752.432024] env[65758]: DEBUG nova.network.neutron [-] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 752.469659] env[65758]: DEBUG nova.policy [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ed008c3828a4b789c03119fe9831dd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7838f7fee6ec47788731190e718d0db0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 752.565999] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660289, 'name': ReconfigVM_Task, 'duration_secs': 0.168881} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.566302] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.566565] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a81d4064-cecc-48e9-9932-0b7605be343e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.574943] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 752.574943] env[65758]: value = "task-4660290" [ 752.574943] env[65758]: _type = "Task" [ 752.574943] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.584693] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.615917] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521fa29b-ea2a-2979-9f86-1b5370a8f2af, 'name': SearchDatastore_Task, 'duration_secs': 0.012617} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.616825] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75c048e9-30b6-47da-9e08-cf91ee46ab27 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.623310] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 752.623310] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b90c09-c866-99a6-834d-52f608d7fe56" [ 752.623310] env[65758]: _type = "Task" [ 752.623310] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.632957] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b90c09-c866-99a6-834d-52f608d7fe56, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.768621] env[65758]: DEBUG oslo_vmware.api [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660287, 'name': RemoveSnapshot_Task, 'duration_secs': 1.701984} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.769048] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 752.817557] env[65758]: DEBUG nova.network.neutron [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Successfully created port: 0510cb2b-8be8-482a-83c4-9743bb78efc1 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 752.822251] env[65758]: DEBUG nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 752.924313] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "refresh_cache-f15c6953-f76b-44eb-bd1b-c0d3adddc163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.924708] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquired lock "refresh_cache-f15c6953-f76b-44eb-bd1b-c0d3adddc163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.925471] env[65758]: DEBUG nova.network.neutron [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 752.936431] env[65758]: INFO nova.compute.manager [-] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Took 2.35 seconds to deallocate network for instance. [ 753.092061] env[65758]: DEBUG oslo_vmware.api [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660290, 'name': PowerOnVM_Task, 'duration_secs': 0.470571} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.096773] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.100846] env[65758]: DEBUG nova.compute.manager [None req-64c461ee-4431-4662-8d14-148aed7c5f12 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 753.101769] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d833314-52c0-4db1-ad76-3d55b6c61479 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.138690] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b90c09-c866-99a6-834d-52f608d7fe56, 'name': SearchDatastore_Task, 'duration_secs': 0.023085} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.138690] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.138690] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.138690] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eaea77c2-a664-4f53-9279-315af3d31bb4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.146985] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 753.146985] env[65758]: value = "task-4660291" [ 753.146985] env[65758]: _type = "Task" [ 753.146985] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.159479] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660291, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.276223] env[65758]: WARNING nova.compute.manager [None req-0a183a33-05b2-446f-a107-4f94f3483481 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Image not found during snapshot: nova.exception.ImageNotFound: Image 1ada059b-91d8-44e9-a5b4-6e50a611cedc could not be found. [ 753.429324] env[65758]: WARNING openstack [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 753.429802] env[65758]: WARNING openstack [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 753.446385] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.471423] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b715f4c7-c2f9-41af-89b8-c6dcb32c754b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.480510] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad57052a-8a5a-4473-a319-6b7c0be15b77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.318196] env[65758]: DEBUG nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 754.321281] env[65758]: DEBUG nova.network.neutron [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 754.329340] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292a5a5c-3af9-4811-ab25-a14e1d6fc001 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.343062] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.946714} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.349875] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.350412] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.354616] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79ae7e02-ef37-4870-a7b7-ace1e2e622f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.360607] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6217ac65-b63e-4758-9621-eb6ad77ef5a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.381333] env[65758]: DEBUG nova.compute.provider_tree [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.386698] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 754.386945] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 754.387147] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 754.387360] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 754.387527] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 754.387670] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 754.387892] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 754.388058] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 754.388228] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 754.388381] env[65758]: DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 754.388545] env[65758]: 
DEBUG nova.virt.hardware [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 754.388915] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 754.388915] env[65758]: value = "task-4660292" [ 754.388915] env[65758]: _type = "Task" [ 754.388915] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.390043] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9989d933-8553-472f-835f-d5fb622cef30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.406380] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3ce519-b5b9-4b31-8093-12257c92bfdd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.410635] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660292, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.491958] env[65758]: DEBUG nova.network.neutron [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Successfully updated port: 0510cb2b-8be8-482a-83c4-9743bb78efc1 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 754.613730] env[65758]: DEBUG nova.compute.manager [req-3c1e7d7b-f732-4a87-a155-4d971756331c req-c0afb5b0-8489-4496-9675-d117acf73b59 service nova] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Received event network-vif-deleted-533485bf-4e5b-467a-a80c-4e9867e7efbe {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 754.668900] env[65758]: WARNING neutronclient.v2_0.client [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
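
The nova.virt.hardware debug lines above walk through CPU topology selection for the m1.nano flavor: with vcpus=1 and no flavor or image limits or preferences, the only factorization of the vCPU count into sockets x cores x threads is 1:1:1, hence "Got 1 possible topologies" and the single sorted candidate VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified, self-contained sketch of that kind of enumeration is shown below; it is illustrative only, not Nova's actual _get_possible_cpu_topologies implementation.

```python
# Simplified sketch of enumerating CPU topologies for a vCPU count, in the
# spirit of the hardware.py debug lines above. Illustrative only; not Nova's
# actual _get_possible_cpu_topologies implementation.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield every sockets*cores*threads factorization of vcpus that
    stays within the given per-dimension limits."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                yield VirtCPUTopology(sockets, cores, threads)


# For vcpus=1 the only result is VirtCPUTopology(sockets=1, cores=1, threads=1),
# matching "Got 1 possible topologies" in the log.
print(list(possible_topologies(1)))
```

For a 4-vCPU flavor the same loop would yield six candidates such as (2, 2, 1) and (4, 1, 1), which would then be ranked against any preferred topology, mirroring the "Sorted desired topologies" step recorded in the log.
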
[ 754.669860] env[65758]: WARNING openstack [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 754.670253] env[65758]: WARNING openstack [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 754.808953] env[65758]: DEBUG nova.network.neutron [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Updating instance_info_cache with network_info: [{"id": "add59776-3d93-4cc4-8b79-045e84a073ff", "address": "fa:16:3e:77:bc:be", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadd59776-3d", "ovs_interfaceid": "add59776-3d93-4cc4-8b79-045e84a073ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 754.836194] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.836194] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.836194] env[65758]: DEBUG nova.compute.manager [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 
a0a9d947-f2ad-4a35-b336-1486c9a76b06] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 754.837879] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a4affe-559d-41e5-a767-cf39d998e0a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.844405] env[65758]: DEBUG nova.compute.manager [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 754.845773] env[65758]: DEBUG nova.objects.instance [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lazy-loading 'flavor' on Instance uuid a0a9d947-f2ad-4a35-b336-1486c9a76b06 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 754.896993] env[65758]: DEBUG nova.scheduler.client.report [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 754.913864] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660292, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088878} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.914169] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.914997] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589f8835-3189-40bb-b985-c42f685064fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.945170] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.945170] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35125385-3c65-4e54-acc7-e72704d9f9c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.968964] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 754.968964] env[65758]: value = "task-4660293" [ 754.968964] env[65758]: _type = "Task" [ 754.968964] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.976975] env[65758]: DEBUG nova.compute.manager [req-b958044d-4f81-4ffe-b1de-fc7e9d7b6cf7 req-a731d12c-2b4f-4c7d-8973-2dd662e34ac8 service nova] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Received event network-vif-deleted-cd42e1b4-a8b0-4277-8fe9-1ea960c938ce {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 754.984079] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660293, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.993825] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquiring lock "refresh_cache-fb379346-f17a-4433-bb55-2b72025e9a61" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.994011] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquired lock "refresh_cache-fb379346-f17a-4433-bb55-2b72025e9a61" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.994246] env[65758]: DEBUG nova.network.neutron [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 755.312090] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Releasing lock "refresh_cache-f15c6953-f76b-44eb-bd1b-c0d3adddc163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.313226] env[65758]: DEBUG nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Instance network_info: |[{"id": "add59776-3d93-4cc4-8b79-045e84a073ff", "address": "fa:16:3e:77:bc:be", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadd59776-3d", "ovs_interfaceid": "add59776-3d93-4cc4-8b79-045e84a073ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 755.313226] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:bc:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'add59776-3d93-4cc4-8b79-045e84a073ff', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.321276] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Creating folder: Project (0a4045fe12c0401fbb68bff8def4e9ea). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.322148] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a22c5b9d-d4ab-49f0-97ed-53c61206d4c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.337381] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Created folder: Project (0a4045fe12c0401fbb68bff8def4e9ea) in parent group-v909763. [ 755.337586] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Creating folder: Instances. Parent ref: group-v909879. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.337859] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be82e8a3-eb36-45c4-9bf2-bd78d0d9da92 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.353696] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Created folder: Instances in parent group-v909879. [ 755.353696] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 755.355977] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.357025] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6fe8ad17-fa1a-4dd3-bad2-59381283217c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.387118] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.387118] env[65758]: value = "task-4660296" [ 755.387118] env[65758]: _type = "Task" [ 755.387118] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.398400] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660296, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.401450] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.604s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.402047] env[65758]: DEBUG nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 755.406436] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.419s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.406436] env[65758]: DEBUG nova.objects.instance [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lazy-loading 'resources' on Instance uuid f7a14628-cc55-41fa-ae89-3958855df8a7 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 755.481655] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660293, 'name': ReconfigVM_Task, 'duration_secs': 0.449222} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.482372] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.482766] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8f00710-e6d4-4f6e-ac52-1d0c6246d4d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.494338] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 755.494338] env[65758]: value = "task-4660297" [ 755.494338] env[65758]: _type = "Task" [ 755.494338] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.497971] env[65758]: WARNING openstack [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.498543] env[65758]: WARNING openstack [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 755.514599] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660297, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.540309] env[65758]: DEBUG nova.network.neutron [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 755.596729] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.596729] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.638837] env[65758]: WARNING neutronclient.v2_0.client [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 755.639664] env[65758]: WARNING openstack [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.643018] env[65758]: WARNING openstack [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 755.768832] env[65758]: DEBUG nova.network.neutron [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Updating instance_info_cache with network_info: [{"id": "0510cb2b-8be8-482a-83c4-9743bb78efc1", "address": "fa:16:3e:1a:29:fa", "network": {"id": "93589b7b-77d7-45dd-8b5c-e842b71e36b4", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2130001451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "7838f7fee6ec47788731190e718d0db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0510cb2b-8b", "ovs_interfaceid": "0510cb2b-8be8-482a-83c4-9743bb78efc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 755.857353] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 755.857782] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c317dfc-74ed-4a3c-96ba-9c2b03fc9428 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.870029] env[65758]: DEBUG oslo_vmware.api [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 755.870029] env[65758]: value = "task-4660298" [ 755.870029] env[65758]: _type = "Task" [ 755.870029] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.879632] env[65758]: DEBUG oslo_vmware.api [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660298, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.904035] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660296, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.910303] env[65758]: DEBUG nova.compute.utils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 755.916152] env[65758]: DEBUG nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 755.916152] env[65758]: DEBUG nova.network.neutron [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 755.916152] env[65758]: WARNING neutronclient.v2_0.client [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 755.917107] env[65758]: WARNING neutronclient.v2_0.client [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 755.918271] env[65758]: WARNING openstack [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 755.918271] env[65758]: WARNING openstack [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 756.009312] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660297, 'name': Rename_Task, 'duration_secs': 0.2375} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.013775] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.013775] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e68a1a5-94b4-4fe3-a12c-b44ca62dde58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.024346] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 756.024346] env[65758]: value = "task-4660299" [ 756.024346] env[65758]: _type = "Task" [ 756.024346] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.038086] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660299, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.062881] env[65758]: DEBUG nova.policy [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3255ff6ca6744f05b538826519856544', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a4045fe12c0401fbb68bff8def4e9ea', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 756.274596] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Releasing lock "refresh_cache-fb379346-f17a-4433-bb55-2b72025e9a61" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.274596] env[65758]: DEBUG nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Instance network_info: |[{"id": "0510cb2b-8be8-482a-83c4-9743bb78efc1", "address": "fa:16:3e:1a:29:fa", "network": {"id": "93589b7b-77d7-45dd-8b5c-e842b71e36b4", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2130001451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "7838f7fee6ec47788731190e718d0db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0510cb2b-8b", "ovs_interfaceid": "0510cb2b-8be8-482a-83c4-9743bb78efc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 756.274596] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:29:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0510cb2b-8be8-482a-83c4-9743bb78efc1', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.281551] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 
tempest-ServerMetadataTestJSON-2086049374-project-member] Creating folder: Project (7838f7fee6ec47788731190e718d0db0). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.286506] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b56a2729-2fba-4a88-ac5f-d512fc55f982 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.302381] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Created folder: Project (7838f7fee6ec47788731190e718d0db0) in parent group-v909763. [ 756.302549] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Creating folder: Instances. Parent ref: group-v909882. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.303419] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1667949f-fdfb-4c5f-b42f-a0b4105d0607 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.319962] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Created folder: Instances in parent group-v909882. [ 756.319962] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 756.320148] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.320356] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56bc6b06-0ca7-4b5a-ad9f-65820e2bf7e8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.346460] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.346460] env[65758]: value = "task-4660302" [ 756.346460] env[65758]: _type = "Task" [ 756.346460] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.358112] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660302, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.380765] env[65758]: DEBUG oslo_vmware.api [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660298, 'name': PowerOffVM_Task, 'duration_secs': 0.221551} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.384067] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.384454] env[65758]: DEBUG nova.compute.manager [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 756.385564] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05c4eef-66b7-4eaf-b610-5e5f790500a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.403845] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660296, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.416468] env[65758]: DEBUG nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 756.476947] env[65758]: DEBUG nova.network.neutron [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Successfully created port: 31fb2cea-c496-4afb-99ad-ed2c4eb852bc {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 756.480044] env[65758]: DEBUG oslo_concurrency.lockutils [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.480338] env[65758]: DEBUG oslo_concurrency.lockutils [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.480555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.480779] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.480977] env[65758]: DEBUG oslo_concurrency.lockutils [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.484460] env[65758]: INFO nova.compute.manager [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Terminating instance [ 756.540545] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660299, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.587421] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfed7e69-0eff-4fdb-aa21-ca2660a04510 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.596408] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bc9086-9292-4fdf-9f37-11aea49a2840 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.629776] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab5d3ea-6a2f-4cc3-aa0d-3c9c96e89da4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.638276] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdca854-1277-4524-8e18-83d5b6576bdc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.658401] env[65758]: DEBUG nova.compute.provider_tree [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.789606] env[65758]: INFO nova.compute.manager [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Unrescuing [ 756.790008] env[65758]: DEBUG oslo_concurrency.lockutils [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock 
"refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.790197] env[65758]: DEBUG oslo_concurrency.lockutils [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.790396] env[65758]: DEBUG nova.network.neutron [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 756.859911] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660302, 'name': CreateVM_Task, 'duration_secs': 0.361232} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.860067] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.860630] env[65758]: WARNING neutronclient.v2_0.client [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 756.861014] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.861164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.861463] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 756.861783] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a5b9c9b-0764-4179-ac77-8df1bd655ed7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.867602] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 756.867602] env[65758]: value = 
"session[52f282ba-8d16-d852-9890-43f0b19795c3]524c6f43-9df3-d0ed-d951-3f394332faaa" [ 756.867602] env[65758]: _type = "Task" [ 756.867602] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.877390] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524c6f43-9df3-d0ed-d951-3f394332faaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.897783] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660296, 'name': CreateVM_Task, 'duration_secs': 1.135658} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.897982] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.899519] env[65758]: WARNING neutronclient.v2_0.client [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 756.899519] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.911038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8bf51c6b-cb92-442d-837b-7eb5c05d9edc tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.075s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.988113] env[65758]: DEBUG nova.compute.manager [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 756.988390] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 756.989333] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1667af6b-ba6f-4e0a-9b02-b7a44239c201 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.997934] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 756.998197] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7cd9c950-2acf-4eeb-b777-2e9c1221e255 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.009387] env[65758]: DEBUG oslo_vmware.api [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 757.009387] env[65758]: value = "task-4660306" [ 757.009387] env[65758]: _type = "Task" [ 757.009387] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.015997] env[65758]: DEBUG oslo_vmware.api [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660306, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.036419] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660299, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.162532] env[65758]: DEBUG nova.scheduler.client.report [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.268588] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.268862] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.269088] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.269276] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.269466] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.272038] env[65758]: INFO nova.compute.manager [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Terminating instance [ 757.293388] env[65758]: WARNING neutronclient.v2_0.client [None 
req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 757.294105] env[65758]: WARNING openstack [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 757.294476] env[65758]: WARNING openstack [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 757.379209] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524c6f43-9df3-d0ed-d951-3f394332faaa, 'name': SearchDatastore_Task, 'duration_secs': 0.009444} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.379496] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.379813] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.380108] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.380259] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.380434] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 
tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.380704] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.381014] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 757.385094] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c239c10-aeaf-4e56-bf45-c4133331c64f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.387137] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-892d2920-ef68-45be-b23e-813bbd7da995 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.393645] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 757.393645] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5263b0be-c34e-2b17-bcf1-d2d8a363b3fc" [ 757.393645] env[65758]: _type = "Task" [ 757.393645] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.398192] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.398381] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.399451] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9b78516-e3e8-4fc9-854c-81dd5526ed05 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.405072] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5263b0be-c34e-2b17-bcf1-d2d8a363b3fc, 'name': SearchDatastore_Task, 'duration_secs': 0.009475} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.405651] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.405914] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.406132] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.409853] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 757.409853] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526d0894-d864-4b64-ac76-235c9b5f78a5" [ 757.409853] env[65758]: _type = "Task" [ 757.409853] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.421194] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526d0894-d864-4b64-ac76-235c9b5f78a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009262} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.422039] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e952335b-b1e2-46dc-81ca-45ddc620fa98 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.427365] env[65758]: DEBUG nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 757.430880] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 757.430880] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526fa31b-e183-698d-5770-a85f75eea28d" [ 757.430880] env[65758]: _type = "Task" [ 757.430880] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.441591] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526fa31b-e183-698d-5770-a85f75eea28d, 'name': SearchDatastore_Task, 'duration_secs': 0.00947} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.441934] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.442494] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] fb379346-f17a-4433-bb55-2b72025e9a61/fb379346-f17a-4433-bb55-2b72025e9a61.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 757.442627] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.442843] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.443100] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86212702-d28e-484e-947f-3e09d1d3af9f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.445214] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92e93aef-921c-474e-b759-c4f3840a10e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.456598] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=<?>,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-21T13:11:36Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 757.456858] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 757.457064] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 757.457340] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 757.457560] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 757.457777] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 757.458046] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.458217] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 757.458382] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 757.458537] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:583}} [ 757.458721] env[65758]: DEBUG nova.virt.hardware [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 757.459785] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e5c8e8-2333-4a49-b640-44db93ec941c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.464443] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.464629] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.465430] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 757.465430] env[65758]: value = "task-4660307" [ 757.465430] env[65758]: _type = "Task" [ 757.465430] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.466059] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3521d36f-35c2-484f-84a1-63f30d1bfb39 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.476693] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72f6a44-0b86-4d26-bc1d-36d43085b11d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.482569] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 757.482569] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525909ba-4032-465b-7c22-e1b30d3b4f36" [ 757.482569] env[65758]: _type = "Task" [ 757.482569] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.499089] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660307, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.506196] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525909ba-4032-465b-7c22-e1b30d3b4f36, 'name': SearchDatastore_Task, 'duration_secs': 0.010387} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.507062] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6d2ba02-fe63-4415-b956-35879df0d4a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.517211] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 757.517211] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52cd145b-1641-b063-f04d-1f9902698133" [ 757.517211] env[65758]: _type = "Task" [ 757.517211] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.521069] env[65758]: DEBUG oslo_vmware.api [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660306, 'name': PowerOffVM_Task, 'duration_secs': 0.198024} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.524576] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 757.526374] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 757.526374] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffe45419-55fe-436a-9fa1-ed621fcb9715 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.534985] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52cd145b-1641-b063-f04d-1f9902698133, 'name': SearchDatastore_Task, 'duration_secs': 0.011685} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.535659] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.535942] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] f15c6953-f76b-44eb-bd1b-c0d3adddc163/f15c6953-f76b-44eb-bd1b-c0d3adddc163.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 757.536474] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59c7442a-1508-4c55-91a4-e4a618b94c4d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.541425] env[65758]: DEBUG oslo_vmware.api [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660299, 'name': PowerOnVM_Task, 'duration_secs': 1.132961} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.542114] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.542430] env[65758]: DEBUG nova.compute.manager [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 757.543464] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d6bbab-a070-4413-8f56-ac0a0ba65eff {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.547731] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 757.547731] env[65758]: value = "task-4660309" [ 757.547731] env[65758]: _type = "Task" [ 757.547731] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.561950] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660309, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.605449] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 757.605866] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 757.606023] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleting the datastore file [datastore2] 9118ff13-e2cf-404c-ae4d-2b9dbc52738d {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 757.606745] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1503b206-90de-40fa-8371-a8e11065d3a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.614924] env[65758]: DEBUG oslo_vmware.api [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 757.614924] env[65758]: value = "task-4660310" [ 757.614924] env[65758]: _type = "Task" [ 757.614924] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.625230] env[65758]: DEBUG oslo_vmware.api [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.669251] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.264s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.672419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.326s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.672624] env[65758]: DEBUG nova.objects.instance [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 757.704454] env[65758]: INFO nova.scheduler.client.report [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Deleted allocations for instance f7a14628-cc55-41fa-ae89-3958855df8a7 [ 757.776310] env[65758]: DEBUG nova.compute.manager [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 757.776480] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 757.777931] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d949a81-0627-4023-a5c4-595a5658b8e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.782793] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.783117] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.783370] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.783921] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.783921] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.784142] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.784389] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.784606] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 757.787481] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.788674] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 757.789200] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10801e4b-c309-475d-b65a-862008c06eb5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.874152] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 757.874552] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 757.874905] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleting the datastore file [datastore1] 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 757.875331] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41e38783-1e10-4ab0-8fd6-999756463f0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.884989] env[65758]: DEBUG oslo_vmware.api [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 757.884989] env[65758]: value = "task-4660312" [ 757.884989] env[65758]: _type = "Task" [ 757.884989] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.901174] env[65758]: DEBUG oslo_vmware.api [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660312, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.978524] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660307, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500304} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.978785] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] fb379346-f17a-4433-bb55-2b72025e9a61/fb379346-f17a-4433-bb55-2b72025e9a61.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 757.979017] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.979323] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e87c35e1-0ffe-4538-8139-835c8d313807 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.987053] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 757.987053] env[65758]: value = "task-4660313" [ 757.987053] env[65758]: _type = "Task" [ 757.987053] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.998297] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660313, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.070306] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660309, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.071340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.132067] env[65758]: DEBUG oslo_vmware.api [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.410671} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.132479] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 758.132661] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 758.136149] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 758.136149] env[65758]: INFO nova.compute.manager [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 758.136149] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 758.136149] env[65758]: DEBUG nova.compute.manager [-] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 758.136149] env[65758]: DEBUG nova.network.neutron [-] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 758.136149] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.
[ 758.136149] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 758.136487] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 758.158213] env[65758]: DEBUG nova.network.neutron [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Successfully updated port: 31fb2cea-c496-4afb-99ad-ed2c4eb852bc {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 758.218189] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a4661f6-e9fe-45a4-a9fb-30029bdd70d2 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "f7a14628-cc55-41fa-ae89-3958855df8a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 43.669s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.290727] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.322917] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 758.396530] env[65758]: DEBUG oslo_vmware.api [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660312, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.497577] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089905} completed successfully.
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.497867] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.499035] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6494add6-5877-40fe-9f00-a9162d0c0ce0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.523836] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] fb379346-f17a-4433-bb55-2b72025e9a61/fb379346-f17a-4433-bb55-2b72025e9a61.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.524729] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf49e56d-0834-4a4d-8e8f-6cc3dfabc8da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.547462] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 758.547462] env[65758]: value = "task-4660314" [ 758.547462] env[65758]: _type = "Task" [ 758.547462] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.561809] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660314, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.565638] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.822039} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.566116] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] f15c6953-f76b-44eb-bd1b-c0d3adddc163/f15c6953-f76b-44eb-bd1b-c0d3adddc163.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 758.566372] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 758.566655] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02fc3d27-ca0b-49d3-8711-916c8ed02c01 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.575947] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 758.575947] env[65758]: value = "task-4660315" [ 758.575947] env[65758]: _type = "Task" [ 758.575947] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.588507] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660315, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.663404] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "refresh_cache-a662eac8-07e2-47f1-a4dd-9abbe824817d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.663404] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquired lock "refresh_cache-a662eac8-07e2-47f1-a4dd-9abbe824817d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.663404] env[65758]: DEBUG nova.network.neutron [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 758.683355] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d4069ca8-6ba9-4f4f-8d85-bfb04f151c87 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.689766] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.987s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.689874] env[65758]: DEBUG nova.objects.instance [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lazy-loading 'resources' on Instance uuid de8f3600-b25f-4396-af37-ea703587979c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 758.703297] env[65758]: DEBUG nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Received event network-vif-plugged-add59776-3d93-4cc4-8b79-045e84a073ff {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 758.703606] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Acquiring lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.703739] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65758) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.704197] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.704197] env[65758]: DEBUG nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] No waiting events found dispatching network-vif-plugged-add59776-3d93-4cc4-8b79-045e84a073ff {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 758.705027] env[65758]: WARNING nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Received unexpected event network-vif-plugged-add59776-3d93-4cc4-8b79-045e84a073ff for instance with vm_state building and task_state spawning. [ 758.705247] env[65758]: DEBUG nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Received event network-changed-add59776-3d93-4cc4-8b79-045e84a073ff {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 758.705482] env[65758]: DEBUG nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Refreshing instance network info cache due to event network-changed-add59776-3d93-4cc4-8b79-045e84a073ff. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 758.705847] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Acquiring lock "refresh_cache-f15c6953-f76b-44eb-bd1b-c0d3adddc163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.705968] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Acquired lock "refresh_cache-f15c6953-f76b-44eb-bd1b-c0d3adddc163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.706135] env[65758]: DEBUG nova.network.neutron [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Refreshing network info cache for port add59776-3d93-4cc4-8b79-045e84a073ff {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 758.731099] env[65758]: WARNING neutronclient.v2_0.client [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.
[ 758.731712] env[65758]: WARNING openstack [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 758.732115] env[65758]: WARNING openstack [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 758.910566] env[65758]: DEBUG oslo_vmware.api [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.551998} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.912083] env[65758]: DEBUG nova.network.neutron [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Updating instance_info_cache with network_info: [{"id": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "address": "fa:16:3e:01:ce:ae", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09744327-3d", "ovs_interfaceid": "09744327-3d1a-4d1b-8f3c-8532ca1fef21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 758.914970] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 758.915822] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Deleted contents of the 
VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 758.915822] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 758.915822] env[65758]: INFO nova.compute.manager [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 758.915822] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 758.916449] env[65758]: DEBUG nova.compute.manager [-] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 758.916449] env[65758]: DEBUG nova.network.neutron [-] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 758.916610] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 758.917121] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 758.917457] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 759.059039] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660314, 'name': ReconfigVM_Task, 'duration_secs': 0.456388} completed successfully.
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.059342] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Reconfigured VM instance instance-0000002c to attach disk [datastore1] fb379346-f17a-4433-bb55-2b72025e9a61/fb379346-f17a-4433-bb55-2b72025e9a61.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 759.060099] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0e6fea3-bf1f-472d-aed9-a3efd2bdd2a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.072185] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 759.072185] env[65758]: value = "task-4660316" [ 759.072185] env[65758]: _type = "Task" [ 759.072185] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.089026] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130335} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.090028] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 759.090398] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660316, 'name': Rename_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.091122] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8556655f-3537-4728-80c7-6016667b19dd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.115531] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] f15c6953-f76b-44eb-bd1b-c0d3adddc163/f15c6953-f76b-44eb-bd1b-c0d3adddc163.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.115956] env[65758]: DEBUG nova.network.neutron [-] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 759.117186] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37eb83a6-9362-442f-9dcf-85e83c90e931 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.139581] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 759.139581] env[65758]: value = "task-4660317" [ 759.139581] env[65758]: _type = "Task" [ 759.139581] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.152600] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660317, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.166993] env[65758]: WARNING openstack [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 759.167493] env[65758]: WARNING openstack [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 759.204386] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 759.209038] env[65758]: WARNING neutronclient.v2_0.client [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 759.212789] env[65758]: WARNING openstack [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 759.213202] env[65758]: WARNING openstack [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 759.243481] env[65758]: DEBUG nova.network.neutron [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 759.417376] env[65758]: DEBUG oslo_concurrency.lockutils [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "refresh_cache-ec1e2845-e73a-40ff-9b6c-1d8281859fba" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.417876] env[65758]: DEBUG nova.objects.instance [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lazy-loading 'flavor' on Instance uuid ec1e2845-e73a-40ff-9b6c-1d8281859fba {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 759.589507] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660316, 'name': Rename_Task, 'duration_secs': 0.215817} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.592296] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 759.593885] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0db62a02-f729-44dd-a8c4-ae19175f8bdd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.600477] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 759.600477] env[65758]: value = "task-4660319" [ 759.600477] env[65758]: _type = "Task" [ 759.600477] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.610839] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660319, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.633125] env[65758]: INFO nova.compute.manager [-] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Took 1.50 seconds to deallocate network for instance. [ 759.657453] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.662123] env[65758]: WARNING neutronclient.v2_0.client [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 759.663061] env[65758]: WARNING openstack [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 759.663158] env[65758]: WARNING openstack [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 759.770689] env[65758]: WARNING neutronclient.v2_0.client [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 759.771486] env[65758]: WARNING openstack [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 759.771801] env[65758]: WARNING openstack [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 759.830052] env[65758]: DEBUG nova.network.neutron [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Updating instance_info_cache with network_info: [{"id": "31fb2cea-c496-4afb-99ad-ed2c4eb852bc", "address": "fa:16:3e:21:c1:a6", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fb2cea-c4", "ovs_interfaceid": "31fb2cea-c496-4afb-99ad-ed2c4eb852bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 759.893691] env[65758]: DEBUG nova.network.neutron [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Updated VIF entry in instance network info cache for port add59776-3d93-4cc4-8b79-045e84a073ff. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 759.894273] env[65758]: DEBUG nova.network.neutron [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Updating instance_info_cache with network_info: [{"id": "add59776-3d93-4cc4-8b79-045e84a073ff", "address": "fa:16:3e:77:bc:be", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadd59776-3d", "ovs_interfaceid": "add59776-3d93-4cc4-8b79-045e84a073ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 759.925628] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e685f17-7af1-471a-850f-318210a72695 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.930748] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb83798-2d79-4b07-b413-35d64e382770 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.957337] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 759.957695] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f55e20b1-6b95-4bbc-8937-f47dc0d19063 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.960159] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5465b7-8a2d-40ef-b6f1-896fa5a2b24d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.995219] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3abd343b-5923-4ce2-b686-ebee6bc47d6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.998104] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 759.998104] env[65758]: value = "task-4660320" [ 759.998104] env[65758]: _type = "Task" [ 759.998104] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.006180] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037b875c-ee52-49fb-af4c-322182703cce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.015301] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660320, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.026425] env[65758]: DEBUG nova.compute.provider_tree [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.055685] env[65758]: DEBUG nova.network.neutron [-] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 760.077909] env[65758]: DEBUG nova.compute.manager [req-88cb57a3-c3a0-4633-a258-5b3e07458297 req-70665520-4f99-4b26-8995-e92d3551f895 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Received event network-vif-plugged-31fb2cea-c496-4afb-99ad-ed2c4eb852bc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 760.078223] env[65758]: DEBUG oslo_concurrency.lockutils [req-88cb57a3-c3a0-4633-a258-5b3e07458297 req-70665520-4f99-4b26-8995-e92d3551f895 service nova] Acquiring lock "a662eac8-07e2-47f1-a4dd-9abbe824817d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.079063] env[65758]: DEBUG oslo_concurrency.lockutils [req-88cb57a3-c3a0-4633-a258-5b3e07458297 req-70665520-4f99-4b26-8995-e92d3551f895 service nova] Lock "a662eac8-07e2-47f1-a4dd-9abbe824817d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.079063] env[65758]: DEBUG oslo_concurrency.lockutils [req-88cb57a3-c3a0-4633-a258-5b3e07458297 req-70665520-4f99-4b26-8995-e92d3551f895 service nova] Lock "a662eac8-07e2-47f1-a4dd-9abbe824817d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.079063] env[65758]: DEBUG nova.compute.manager 
[req-88cb57a3-c3a0-4633-a258-5b3e07458297 req-70665520-4f99-4b26-8995-e92d3551f895 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] No waiting events found dispatching network-vif-plugged-31fb2cea-c496-4afb-99ad-ed2c4eb852bc {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 760.082320] env[65758]: WARNING nova.compute.manager [req-88cb57a3-c3a0-4633-a258-5b3e07458297 req-70665520-4f99-4b26-8995-e92d3551f895 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Received unexpected event network-vif-plugged-31fb2cea-c496-4afb-99ad-ed2c4eb852bc for instance with vm_state building and task_state spawning. [ 760.112747] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660319, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.148045] env[65758]: DEBUG oslo_concurrency.lockutils [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.152859] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660317, 'name': ReconfigVM_Task, 'duration_secs': 0.759462} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.153065] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Reconfigured VM instance instance-0000002b to attach disk [datastore1] f15c6953-f76b-44eb-bd1b-c0d3adddc163/f15c6953-f76b-44eb-bd1b-c0d3adddc163.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.153839] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45a1b7c7-b2bf-418b-aeff-f31d0dbea175 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.164192] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 760.164192] env[65758]: value = "task-4660321" [ 760.164192] env[65758]: _type = "Task" [ 760.164192] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.175234] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660321, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.339048] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Releasing lock "refresh_cache-a662eac8-07e2-47f1-a4dd-9abbe824817d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.339493] env[65758]: DEBUG nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Instance network_info: |[{"id": "31fb2cea-c496-4afb-99ad-ed2c4eb852bc", "address": "fa:16:3e:21:c1:a6", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fb2cea-c4", "ovs_interfaceid": "31fb2cea-c496-4afb-99ad-ed2c4eb852bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 760.340172] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:c1:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31fb2cea-c496-4afb-99ad-ed2c4eb852bc', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.347758] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 760.348769] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 760.349059] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-010329f0-07b5-4307-84d6-55f04647f079 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.371245] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.371245] env[65758]: value = "task-4660322" [ 760.371245] env[65758]: _type = "Task" [ 760.371245] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.381292] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660322, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.398163] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Releasing lock "refresh_cache-f15c6953-f76b-44eb-bd1b-c0d3adddc163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.398501] env[65758]: DEBUG nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Received event network-vif-plugged-0510cb2b-8be8-482a-83c4-9743bb78efc1 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 760.398675] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Acquiring lock "fb379346-f17a-4433-bb55-2b72025e9a61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.398883] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Lock "fb379346-f17a-4433-bb55-2b72025e9a61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.399254] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Lock "fb379346-f17a-4433-bb55-2b72025e9a61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.399440] env[65758]: DEBUG nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] No waiting events found dispatching network-vif-plugged-0510cb2b-8be8-482a-83c4-9743bb78efc1 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 760.399607] env[65758]: WARNING nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service 
nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Received unexpected event network-vif-plugged-0510cb2b-8be8-482a-83c4-9743bb78efc1 for instance with vm_state building and task_state spawning. [ 760.399800] env[65758]: DEBUG nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Received event network-changed-0510cb2b-8be8-482a-83c4-9743bb78efc1 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 760.399978] env[65758]: DEBUG nova.compute.manager [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Refreshing instance network info cache due to event network-changed-0510cb2b-8be8-482a-83c4-9743bb78efc1. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 760.400219] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Acquiring lock "refresh_cache-fb379346-f17a-4433-bb55-2b72025e9a61" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.400367] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Acquired lock "refresh_cache-fb379346-f17a-4433-bb55-2b72025e9a61" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.400527] env[65758]: DEBUG nova.network.neutron [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Refreshing network info cache for port 0510cb2b-8be8-482a-83c4-9743bb78efc1 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 760.513393] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660320, 'name': PowerOffVM_Task, 'duration_secs': 0.302017} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.513553] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 760.524586] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Reconfiguring VM instance instance-00000021 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 760.525154] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a2ca608-054d-4e71-b03e-b88c21131e9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.548713] env[65758]: DEBUG nova.scheduler.client.report [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.558519] env[65758]: INFO nova.compute.manager [-] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Took 1.64 seconds to deallocate network for instance. [ 760.569879] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 760.569879] env[65758]: value = "task-4660323" [ 760.569879] env[65758]: _type = "Task" [ 760.569879] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.583043] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660323, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.618032] env[65758]: DEBUG oslo_vmware.api [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660319, 'name': PowerOnVM_Task, 'duration_secs': 0.821862} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.618677] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 760.618978] env[65758]: INFO nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Took 6.30 seconds to spawn the instance on the hypervisor. [ 760.619421] env[65758]: DEBUG nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 760.620788] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc25e2a1-61bd-493f-a196-2e58005f16d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.678452] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660321, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.703333] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "549673ec-3d75-4aad-a001-014f3f53a6b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.704439] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "549673ec-3d75-4aad-a001-014f3f53a6b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.704931] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "549673ec-3d75-4aad-a001-014f3f53a6b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.704931] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "549673ec-3d75-4aad-a001-014f3f53a6b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.704931] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "549673ec-3d75-4aad-a001-014f3f53a6b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.707545] env[65758]: INFO nova.compute.manager [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Terminating instance [ 760.888071] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660322, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.904242] env[65758]: WARNING neutronclient.v2_0.client [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 760.905118] env[65758]: WARNING openstack [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 760.906787] env[65758]: WARNING openstack [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 761.056627] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.367s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.059066] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.811s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.059533] env[65758]: DEBUG nova.objects.instance [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lazy-loading 'resources' on Instance uuid b7692c74-c919-45b4-991b-c06a530ff9ef {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.069508] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 
tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.092392] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.096700] env[65758]: INFO nova.scheduler.client.report [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleted allocations for instance de8f3600-b25f-4396-af37-ea703587979c [ 761.104809] env[65758]: WARNING neutronclient.v2_0.client [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 761.105814] env[65758]: WARNING openstack [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 761.105814] env[65758]: WARNING openstack [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 761.153475] env[65758]: INFO nova.compute.manager [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Took 45.17 seconds to build instance. [ 761.180246] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660321, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.210521] env[65758]: DEBUG nova.network.neutron [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Updated VIF entry in instance network info cache for port 0510cb2b-8be8-482a-83c4-9743bb78efc1. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 761.210892] env[65758]: DEBUG nova.network.neutron [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Updating instance_info_cache with network_info: [{"id": "0510cb2b-8be8-482a-83c4-9743bb78efc1", "address": "fa:16:3e:1a:29:fa", "network": {"id": "93589b7b-77d7-45dd-8b5c-e842b71e36b4", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2130001451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "7838f7fee6ec47788731190e718d0db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0510cb2b-8b", "ovs_interfaceid": "0510cb2b-8be8-482a-83c4-9743bb78efc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 761.213060] env[65758]: DEBUG nova.compute.manager [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 761.213264] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.214332] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa279927-4901-4dee-b745-fbe6275724be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.223709] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.224184] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8d17107-2366-4d78-a85a-3a964d4e6140 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.231753] env[65758]: DEBUG oslo_vmware.api [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 761.231753] env[65758]: value = "task-4660324" [ 761.231753] env[65758]: _type = "Task" [ 761.231753] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.241000] env[65758]: DEBUG oslo_vmware.api [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4660324, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.386340] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660322, 'name': CreateVM_Task, 'duration_secs': 0.60361} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.387574] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.388786] env[65758]: DEBUG nova.compute.manager [req-ce83a6ef-9af1-4af0-b1be-634018d0161c req-0504efa7-a89e-4147-832a-8e22f4061eca service nova] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Received event network-vif-deleted-0cad03bd-bdfb-4780-a072-70a72be1d8b2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 761.389367] env[65758]: WARNING neutronclient.v2_0.client [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 761.389713] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.389963] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.390213] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 761.390822] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5308d15-6c1e-4e34-9031-0e2110ba6a94 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.397460] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 761.397460] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526fe6d4-dca6-311b-a1a3-53aba22545a0" [ 761.397460] env[65758]: _type = "Task" [ 761.397460] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.410417] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526fe6d4-dca6-311b-a1a3-53aba22545a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.582790] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660323, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.607746] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a5d2f373-b23d-4abd-b2dc-355216322c1a tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "de8f3600-b25f-4396-af37-ea703587979c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.785s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.656697] env[65758]: DEBUG nova.compute.manager [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 761.657269] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2484f6a4-c851-4795-84ed-a5f8dfaef6a7 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "fb379346-f17a-4433-bb55-2b72025e9a61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.380s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.661106] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ee35ab-8b99-40be-9004-829378a837ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.690853] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660321, 'name': Rename_Task, 'duration_secs': 1.244105} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.694377] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.695400] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2443801-486f-48bf-ac33-4922bfb9e594 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.703861] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 761.703861] env[65758]: value = "task-4660326" [ 761.703861] env[65758]: _type = "Task" [ 761.703861] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.718063] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660326, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.718279] env[65758]: DEBUG oslo_concurrency.lockutils [req-42538ac9-4d3b-4597-bee6-bac2c86259e3 req-52311be3-f870-4734-974f-e7efa460fd15 service nova] Releasing lock "refresh_cache-fb379346-f17a-4433-bb55-2b72025e9a61" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.748755] env[65758]: DEBUG oslo_vmware.api [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4660324, 'name': PowerOffVM_Task, 'duration_secs': 0.367186} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.753077] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 761.753371] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 761.753973] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08b94824-6071-4005-b16a-a252f46946b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.833416] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 761.833667] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 761.833920] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Deleting the datastore file [datastore2] 549673ec-3d75-4aad-a001-014f3f53a6b0 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 761.834930] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-572eb014-782a-47ed-86c3-d41f0e0ebc1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.844625] env[65758]: DEBUG oslo_vmware.api [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for the task: (returnval){ [ 761.844625] env[65758]: value = "task-4660328" [ 
761.844625] env[65758]: _type = "Task" [ 761.844625] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.857706] env[65758]: DEBUG oslo_vmware.api [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4660328, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.910766] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526fe6d4-dca6-311b-a1a3-53aba22545a0, 'name': SearchDatastore_Task, 'duration_secs': 0.019238} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.911094] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.911331] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.911557] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.911695] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.911906] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.912580] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e620d6e-f3c7-4652-843a-6d2593d59ff6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.933822] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 
tempest-ListImageFiltersTestJSON-1542069529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.933822] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.934198] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77bf5945-c20c-4f2b-9513-d4c8cecf6069 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.941075] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 761.941075] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52feb62d-956b-8178-cf07-53b8ae6eac7d" [ 761.941075] env[65758]: _type = "Task" [ 761.941075] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.952919] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52feb62d-956b-8178-cf07-53b8ae6eac7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.085032] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660323, 'name': ReconfigVM_Task, 'duration_secs': 1.291219} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.085780] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Reconfigured VM instance instance-00000021 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 762.085780] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 762.085959] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1906775-cbbc-41d8-96fe-293fd743e53c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.094398] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 762.094398] env[65758]: value = "task-4660329" [ 762.094398] env[65758]: _type = "Task" [ 762.094398] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.106855] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660329, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.144541] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92d00cf-3718-4e97-ab43-2ffcba5c410b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.155197] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38864323-c551-4759-80a2-b92c393917db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.185802] env[65758]: DEBUG nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 762.189492] env[65758]: INFO nova.compute.manager [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] instance snapshotting [ 762.189775] env[65758]: WARNING nova.compute.manager [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 762.194581] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6f1aa7-83bb-418c-b9a8-e46b0b270981 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.196196] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff310f9-8155-45c6-8811-84f7e8e69060 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.226023] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2a26b3-0b45-471f-bad0-b6216bfb87cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.228737] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7882a473-3f2e-417f-819e-157b339d32ef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.243937] env[65758]: DEBUG nova.compute.provider_tree [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.251378] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660326, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.256567] env[65758]: DEBUG nova.scheduler.client.report [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.356925] env[65758]: DEBUG oslo_vmware.api [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Task: {'id': task-4660328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235845} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.357301] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 762.357495] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 762.357665] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 762.357834] env[65758]: INFO nova.compute.manager [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 762.358101] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 762.358318] env[65758]: DEBUG nova.compute.manager [-] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 762.358422] env[65758]: DEBUG nova.network.neutron [-] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 762.358702] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
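The PowerOnVM_Task, SearchDatastore_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: invoke an asynchronous vSphere *_Task method, then block in wait_for_task(), whose poll loop is what emits the "Waiting for the task", "progress is N%" and "completed successfully" lines (api.py:397/434/444). A minimal caller-side sketch of that pattern follows; the host, credentials and vm_ref are placeholders, and the exact VMwareAPISession constructor arguments may vary between oslo.vmware releases.

from oslo_vmware import api as vmware_api

# Constructing the session logs into vCenter immediately (the
# SessionManager.Login call near the top of this log). Placeholder endpoint
# and credentials; the retry/poll settings are illustrative.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_on(vm_ref):
    # Issue the asynchronous vSphere task (cf. PowerOnVM_Task above) ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # ... then poll it to completion; this wait is what produces the
    # DEBUG oslo_vmware.api "progress is N%" lines in the log.
    session.wait_for_task(task)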
[ 762.359285] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 762.359549] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 762.386487] env[65758]: DEBUG nova.compute.manager [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Received event network-changed-31fb2cea-c496-4afb-99ad-ed2c4eb852bc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 762.386487] env[65758]: DEBUG nova.compute.manager [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Refreshing instance network info cache due to event network-changed-31fb2cea-c496-4afb-99ad-ed2c4eb852bc. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 762.386487] env[65758]: DEBUG oslo_concurrency.lockutils [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] Acquiring lock "refresh_cache-a662eac8-07e2-47f1-a4dd-9abbe824817d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.386487] env[65758]: DEBUG oslo_concurrency.lockutils [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] Acquired lock "refresh_cache-a662eac8-07e2-47f1-a4dd-9abbe824817d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.386487] env[65758]: DEBUG nova.network.neutron [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Refreshing network info cache for port 31fb2cea-c496-4afb-99ad-ed2c4eb852bc {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 762.409476] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 762.454533] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52feb62d-956b-8178-cf07-53b8ae6eac7d, 'name': SearchDatastore_Task, 'duration_secs': 0.01189} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.455431] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74112d2d-788a-455a-963d-ef6a2975616c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.462481] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 762.462481] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52eafbd1-c507-ec9a-006a-919da861dff6" [ 762.462481] env[65758]: _type = "Task" [ 762.462481] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.472854] env[65758]: INFO nova.compute.manager [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Rebuilding instance [ 762.474938] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52eafbd1-c507-ec9a-006a-919da861dff6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.519558] env[65758]: DEBUG nova.compute.manager [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 762.520935] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d4c814-0c38-4b0a-b138-b62539a887b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.605940] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660329, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.716630] env[65758]: DEBUG oslo_vmware.api [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660326, 'name': PowerOnVM_Task, 'duration_secs': 0.689801} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.717859] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.718209] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.718476] env[65758]: INFO nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Took 11.67 seconds to spawn the instance on the hypervisor. [ 762.718709] env[65758]: DEBUG nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 762.719659] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302b7459-133a-4d09-9af1-3f4eb80bcd4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.763018] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 762.763018] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.704s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.765220] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ecb81f53-91c0-43e3-8341-3a46665d4772 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.768107] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.949s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.769333] env[65758]: INFO nova.compute.claims [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.779207] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 762.779207] env[65758]: value = "task-4660330" [ 762.779207] env[65758]: _type = "Task" [ 762.779207] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.785260] env[65758]: INFO nova.scheduler.client.report [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleted allocations for instance b7692c74-c919-45b4-991b-c06a530ff9ef [ 762.790840] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660330, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.889170] env[65758]: WARNING neutronclient.v2_0.client [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 762.889797] env[65758]: WARNING openstack [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 762.890208] env[65758]: WARNING openstack [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 762.977624] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52eafbd1-c507-ec9a-006a-919da861dff6, 'name': SearchDatastore_Task, 'duration_secs': 0.034952} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.977928] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.978203] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a662eac8-07e2-47f1-a4dd-9abbe824817d/a662eac8-07e2-47f1-a4dd-9abbe824817d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 762.978475] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfdbc3cc-a0ab-40fd-acb2-b9924885e2a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.986900] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 762.986900] env[65758]: value = "task-4660331" [ 762.986900] env[65758]: _type = "Task" [ 762.986900] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.004554] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660331, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.087803] env[65758]: WARNING neutronclient.v2_0.client [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
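The Acquiring/Acquired/Releasing lock entries around [datastore1] devstack-image-cache_base/75a6399b-... come from oslo.concurrency's lockutils: the driver serializes on the cached image path so concurrent spawns do not fetch or copy the same VMDK twice, then copies it into the instance directory (the CopyVirtualDisk_Task above). A minimal sketch of that locking pattern, using an illustrative path and callback rather than Nova's actual helper:

from oslo_concurrency import lockutils

def copy_cached_image(image_id, copy_fn):
    # Lock name mirrors the datastore path seen in the log entries above;
    # the helper and path layout here are illustrative only.
    cache_vmdk = ('[datastore1] devstack-image-cache_base/'
                  '%s/%s.vmdk' % (image_id, image_id))
    # external=True adds a process-shared file lock on top of the in-process
    # semaphore, which is what the "external semaphore" messages refer to.
    with lockutils.lock(cache_vmdk, lock_file_prefix='nova-', external=True):
        copy_fn(cache_vmdk)  # e.g. CopyVirtualDisk_Task into the instance dir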
[ 763.089464] env[65758]: WARNING openstack [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 763.089908] env[65758]: WARNING openstack [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 763.106984] env[65758]: DEBUG oslo_vmware.api [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660329, 'name': PowerOnVM_Task, 'duration_secs': 0.699896} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.107272] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 763.107496] env[65758]: DEBUG nova.compute.manager [None req-07d63f0c-1130-4d71-9484-6322e2dd5186 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 763.108281] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00d1546-f041-4860-a6a8-37aca3c81ec5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.139276] env[65758]: DEBUG nova.network.neutron [-] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 763.195658] env[65758]: DEBUG nova.network.neutron [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Updated VIF entry in instance network info cache for port 31fb2cea-c496-4afb-99ad-ed2c4eb852bc. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 763.196062] env[65758]: DEBUG nova.network.neutron [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Updating instance_info_cache with network_info: [{"id": "31fb2cea-c496-4afb-99ad-ed2c4eb852bc", "address": "fa:16:3e:21:c1:a6", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fb2cea-c4", "ovs_interfaceid": "31fb2cea-c496-4afb-99ad-ed2c4eb852bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 763.238689] env[65758]: INFO nova.compute.manager [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Took 49.32 seconds to build instance. [ 763.291258] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660330, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.299475] env[65758]: DEBUG oslo_concurrency.lockutils [None req-19f0a761-3fac-4e72-b52b-6402e6333012 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7692c74-c919-45b4-991b-c06a530ff9ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.749s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.509189] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660331, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.538383] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 763.538751] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98ef08ea-069d-4971-b31c-4e7e7c561a65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.547300] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 763.547300] env[65758]: value = "task-4660332" [ 763.547300] env[65758]: _type = "Task" [ 763.547300] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.558671] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.644139] env[65758]: INFO nova.compute.manager [-] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Took 1.28 seconds to deallocate network for instance. [ 763.700676] env[65758]: DEBUG oslo_concurrency.lockutils [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] Releasing lock "refresh_cache-a662eac8-07e2-47f1-a4dd-9abbe824817d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.701223] env[65758]: DEBUG nova.compute.manager [req-285cf5fd-4f24-455d-8860-a61778541284 req-f88a3042-32b4-4b62-9e54-4ae5abe71375 service nova] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Received event network-vif-deleted-528ce775-8b65-438e-b3a5-647df86651f8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 763.741144] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65130927-e7ba-4f13-8371-48ef166bed5b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.502s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.793837] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660330, 'name': CreateSnapshot_Task, 'duration_secs': 0.947843} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.794610] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 763.795217] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35729b6-0a39-4335-9a52-2af7b5f1887f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.844847] env[65758]: DEBUG nova.compute.manager [req-5786392b-ab12-4b4c-ba5a-d55bdd58d3d8 req-139cdab2-fd94-494f-ae70-3e4919c39342 service nova] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Received event network-vif-deleted-3d50d517-3f1a-4b04-a81d-54672953d4c6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 764.010636] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530894} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.013731] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a662eac8-07e2-47f1-a4dd-9abbe824817d/a662eac8-07e2-47f1-a4dd-9abbe824817d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.014067] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.014797] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6bbacc0a-89f7-4534-b0c8-b4eb2b9547d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.024888] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 764.024888] env[65758]: value = "task-4660334" [ 764.024888] env[65758]: _type = "Task" [ 764.024888] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.045041] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660334, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.062040] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660332, 'name': PowerOffVM_Task, 'duration_secs': 0.337581} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.062423] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 764.062690] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.063569] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91dc2681-e267-4412-aef6-bb40e328c37e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.072366] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 764.075555] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc45579c-b055-4d0b-8901-fb79b8e150a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.154991] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.169498] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.169727] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.169897] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleting the datastore file [datastore1] 2d787237-26e5-4519-9f6e-1d30b9d016cf {{(pid=65758) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.170206] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05917379-ed4d-49c9-8bc5-06012130e985 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.178367] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 764.178367] env[65758]: value = "task-4660336" [ 764.178367] env[65758]: _type = "Task" [ 764.178367] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.190619] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660336, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.244557] env[65758]: DEBUG nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 764.318986] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 764.319367] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d98ac440-2837-483f-9d3c-f811c470b239 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.334593] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 764.334593] env[65758]: value = "task-4660337" [ 764.334593] env[65758]: _type = "Task" [ 764.334593] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.339896] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140674a0-8cd7-4ffd-94e1-85891fde9d0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.348615] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660337, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.351462] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f78b220-c0a8-4a88-8211-e35ac94e1dc6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.384659] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a95e7e-503b-4ee2-a88f-3be9aa9fb554 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.394058] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4907a1-d592-4784-b47c-229551c83f6b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.412049] env[65758]: DEBUG nova.compute.provider_tree [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.533671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "c1b9d81e-d747-4665-a083-26d8383f7645" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.533923] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "c1b9d81e-d747-4665-a083-26d8383f7645" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.541495] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660334, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080353} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.541495] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.543037] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af75a645-ee42-463f-bb3c-3b4ccc096218 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.573050] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] a662eac8-07e2-47f1-a4dd-9abbe824817d/a662eac8-07e2-47f1-a4dd-9abbe824817d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.574961] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a7cdad4-4737-4141-9b71-e3ce60bbc525 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.594894] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "b7323030-4573-4af5-a19a-212a140d642a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.594894] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7323030-4573-4af5-a19a-212a140d642a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.601031] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 764.601031] env[65758]: value = "task-4660338" [ 764.601031] env[65758]: _type = "Task" [ 764.601031] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.611028] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660338, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.689435] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.405433} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.689774] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 764.689965] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 764.690163] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 764.775825] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.848701] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660337, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.916391] env[65758]: DEBUG nova.scheduler.client.report [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.114433] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660338, 'name': ReconfigVM_Task, 'duration_secs': 0.463984} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.114748] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Reconfigured VM instance instance-0000002d to attach disk [datastore1] a662eac8-07e2-47f1-a4dd-9abbe824817d/a662eac8-07e2-47f1-a4dd-9abbe824817d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.115664] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44e98b78-030d-491f-94d3-5aa52ab2cb11 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.123808] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 765.123808] env[65758]: value = "task-4660339" [ 765.123808] env[65758]: _type = "Task" [ 765.123808] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.134847] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660339, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.347021] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660337, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.422162] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.423268] env[65758]: DEBUG nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 765.425627] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.605s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.427132] env[65758]: INFO nova.compute.claims [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.635543] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660339, 'name': Rename_Task, 'duration_secs': 0.177897} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.635975] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 765.636320] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b47df848-6492-4e4f-a4cd-16374653e9b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.645586] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 765.645586] env[65758]: value = "task-4660340" [ 765.645586] env[65758]: _type = "Task" [ 765.645586] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.659011] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660340, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.729456] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 765.729752] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.730069] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 765.730069] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.730610] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 765.730610] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 765.730610] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 765.731579] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 765.731579] env[65758]: DEBUG nova.virt.hardware [None 
req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 765.731579] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 765.732045] env[65758]: DEBUG nova.virt.hardware [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 765.733029] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deff647e-5e75-4d99-9895-75450f9b9ad6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.742994] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18db7a0-f307-4644-9566-dd84bb6b4885 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.760578] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:35:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df4cf195-46a9-4de5-ae34-2363de4377f0', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.768867] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 765.769280] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 765.769532] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c0ed942-0ac4-4f2d-8744-52ae855026aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.790901] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.790901] env[65758]: value = "task-4660341" [ 765.790901] env[65758]: _type = "Task" [ 765.790901] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.800352] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660341, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.846661] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660337, 'name': CloneVM_Task, 'duration_secs': 1.463384} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.846947] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Created linked-clone VM from snapshot [ 765.847763] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6597d497-56f4-4a75-ae23-d6fd89370100 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.857111] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Uploading image 332f38a3-c164-47f7-a225-3e0f0c16aabe {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 765.883337] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 765.883337] env[65758]: value = "vm-909890" [ 765.883337] env[65758]: _type = "VirtualMachine" [ 765.883337] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 765.883337] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d476ec23-6e47-48d6-99b4-4d147f75de77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.891649] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lease: (returnval){ [ 765.891649] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ee9d7f-90ed-b6b8-ff3c-d88d95cfdf30" [ 765.891649] env[65758]: _type = "HttpNfcLease" [ 765.891649] env[65758]: } obtained for exporting VM: (result){ [ 765.891649] env[65758]: value = "vm-909890" [ 765.891649] env[65758]: _type = "VirtualMachine" [ 765.891649] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 765.891946] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the lease: (returnval){ [ 765.891946] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ee9d7f-90ed-b6b8-ff3c-d88d95cfdf30" [ 765.891946] env[65758]: _type = "HttpNfcLease" [ 765.891946] env[65758]: } to be ready. 
{{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 765.899424] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 765.899424] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ee9d7f-90ed-b6b8-ff3c-d88d95cfdf30" [ 765.899424] env[65758]: _type = "HttpNfcLease" [ 765.899424] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 765.932477] env[65758]: DEBUG nova.compute.utils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 765.936526] env[65758]: DEBUG nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 765.936738] env[65758]: DEBUG nova.network.neutron [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 765.937086] env[65758]: WARNING neutronclient.v2_0.client [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 765.937412] env[65758]: WARNING neutronclient.v2_0.client [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
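The entries above follow oslo.vmware's polling pattern: a vSphere task (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task) or an HttpNfcLease is created through the API session and then polled at the configured interval until it completes, which is what produces the repeated "progress is N%" and "Waiting for the lease ... is initializing / is ready" lines. A minimal sketch of that pattern, assuming typical oslo.vmware usage; the host, credentials and vm_ref lookup below are placeholders, not values taken from this log:

from oslo_vmware import api as vmware_api

# Placeholder connection details; the real session in this log was created
# against the vCenter named during driver start-up.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# PowerOnVM_Task mirrors the task-4660340 entries: invoke the API, then block
# while wait_for_task polls the task (the "progress is N%" lines) to completion.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)  # vm_ref: a VirtualMachine moref (placeholder)
session.wait_for_task(task)

# ExportVm returns an HttpNfcLease; wait_for_lease_ready corresponds to the
# "Waiting for the lease ... is initializing ... is ready" entries above.
lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
session.wait_for_lease_ready(lease)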
[ 765.937981] env[65758]: WARNING openstack [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 765.938391] env[65758]: WARNING openstack [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 766.000423] env[65758]: DEBUG nova.policy [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '650782c5b2a34cb78c4ec4e884fcad82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '296e50c9805843949e592a0ab985d3a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 766.159111] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660340, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.309738] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660341, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.401724] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 766.401724] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ee9d7f-90ed-b6b8-ff3c-d88d95cfdf30" [ 766.401724] env[65758]: _type = "HttpNfcLease" [ 766.401724] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 766.402122] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 766.402122] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ee9d7f-90ed-b6b8-ff3c-d88d95cfdf30" [ 766.402122] env[65758]: _type = "HttpNfcLease" [ 766.402122] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 766.402944] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1132957-fba3-4376-ad14-3af2fc987a6c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.417666] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529d82a6-89e7-cc0c-458c-d2dd209bc33c/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 766.417872] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529d82a6-89e7-cc0c-458c-d2dd209bc33c/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 766.420262] env[65758]: DEBUG nova.network.neutron [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Successfully created port: 1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 766.489158] env[65758]: DEBUG nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 766.561167] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8a5804eb-6331-4b7b-89f2-be78e411148c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.639654] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de36e085-f68c-4597-9ad6-029e4b07561a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.652937] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc37dc26-eff1-4497-bc00-3c12b33330f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.663844] env[65758]: DEBUG oslo_vmware.api [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660340, 'name': PowerOnVM_Task, 'duration_secs': 0.725162} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.692155] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 766.692594] env[65758]: INFO nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Took 9.26 seconds to spawn the instance on the hypervisor. [ 766.693596] env[65758]: DEBUG nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 766.694533] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9055086-ea1b-40fc-8033-881d4072b652 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.699024] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2effd0b2-7b31-4db1-bf9d-bc33682dfa27 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.719182] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692de1eb-c0e0-407e-9601-0f71700fe96e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.746088] env[65758]: DEBUG nova.compute.provider_tree [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.803856] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660341, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.110101] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.110957] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.229511] env[65758]: INFO nova.compute.manager [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Took 50.11 seconds to build instance. [ 767.249135] env[65758]: DEBUG nova.scheduler.client.report [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.307747] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660341, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.509880] env[65758]: DEBUG nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 767.539237] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 767.539811] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.540153] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 767.540501] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.540788] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 767.541113] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 767.541584] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 767.541883] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 767.542241] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] 
Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 767.542478] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 767.542712] env[65758]: DEBUG nova.virt.hardware [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 767.543746] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de2b23-1766-4f9e-956a-731f1db9ad48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.554240] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1afc550-986e-4383-8ae5-f0a5bf03eaad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.731222] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afc08cd5-c920-408e-bd47-85a68d4e60e8 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "a662eac8-07e2-47f1-a4dd-9abbe824817d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.049s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.755521] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.756665] env[65758]: DEBUG nova.compute.manager [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 767.760705] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.107s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.761264] env[65758]: DEBUG nova.objects.instance [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lazy-loading 'resources' on Instance uuid 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 767.808061] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660341, 'name': CreateVM_Task, 'duration_secs': 1.573566} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.808387] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.809293] env[65758]: WARNING neutronclient.v2_0.client [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 767.809680] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.810087] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.810810] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.811276] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90aa27bd-6a36-4d40-b071-fbbaaa4dddd8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.821855] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 767.821855] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ab070e-1351-3913-b57b-93247b57f94f" [ 767.821855] env[65758]: 
_type = "Task" [ 767.821855] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.831956] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ab070e-1351-3913-b57b-93247b57f94f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.872371] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquiring lock "fb379346-f17a-4433-bb55-2b72025e9a61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.872701] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "fb379346-f17a-4433-bb55-2b72025e9a61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.873288] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquiring lock "fb379346-f17a-4433-bb55-2b72025e9a61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.873637] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "fb379346-f17a-4433-bb55-2b72025e9a61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.873828] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "fb379346-f17a-4433-bb55-2b72025e9a61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.876243] env[65758]: INFO nova.compute.manager [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Terminating instance [ 768.096992] env[65758]: DEBUG nova.network.neutron [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Successfully updated port: 1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 768.234968] 
env[65758]: DEBUG nova.compute.manager [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 768.262485] env[65758]: DEBUG nova.compute.utils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 768.264337] env[65758]: DEBUG nova.compute.manager [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Not allocating networking since 'none' was specified. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 768.337080] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ab070e-1351-3913-b57b-93247b57f94f, 'name': SearchDatastore_Task, 'duration_secs': 0.021657} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.337493] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.337788] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.338098] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.338289] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.338525] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.338829] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-ebec335f-158d-4c8d-b059-75d053e5a583 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.349667] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.350324] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.350664] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1de5dbbb-3037-4550-9a0e-264f9b38d160 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.360129] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 768.360129] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d5f62f-4236-697f-c81d-657d3e58c15b" [ 768.360129] env[65758]: _type = "Task" [ 768.360129] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.369524] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d5f62f-4236-697f-c81d-657d3e58c15b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.383740] env[65758]: DEBUG nova.compute.manager [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 768.384012] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 768.385101] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c669b1c7-2e2a-47c6-b619-be6ecb647880 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.397074] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 768.397659] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c851af50-5679-45c6-91a1-74e2c62fa58e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.407531] env[65758]: DEBUG oslo_vmware.api [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 768.407531] env[65758]: value = "task-4660345" [ 768.407531] env[65758]: _type = "Task" [ 768.407531] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.421364] env[65758]: DEBUG oslo_vmware.api [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660345, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.603944] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquiring lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.605443] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquired lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.605443] env[65758]: DEBUG nova.network.neutron [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 768.657065] env[65758]: DEBUG nova.compute.manager [req-6ec8d57d-7c19-42be-9169-9ebdeb9c3854 req-2eefe3b9-0e0e-419b-af58-bb8f14f57526 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Received event network-vif-plugged-1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 768.657317] env[65758]: DEBUG oslo_concurrency.lockutils [req-6ec8d57d-7c19-42be-9169-9ebdeb9c3854 req-2eefe3b9-0e0e-419b-af58-bb8f14f57526 service nova] Acquiring lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.657541] env[65758]: DEBUG oslo_concurrency.lockutils [req-6ec8d57d-7c19-42be-9169-9ebdeb9c3854 req-2eefe3b9-0e0e-419b-af58-bb8f14f57526 service nova] Lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.657671] env[65758]: DEBUG oslo_concurrency.lockutils [req-6ec8d57d-7c19-42be-9169-9ebdeb9c3854 req-2eefe3b9-0e0e-419b-af58-bb8f14f57526 service nova] Lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.657846] env[65758]: DEBUG nova.compute.manager [req-6ec8d57d-7c19-42be-9169-9ebdeb9c3854 req-2eefe3b9-0e0e-419b-af58-bb8f14f57526 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] No waiting events found dispatching network-vif-plugged-1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 768.657983] env[65758]: WARNING nova.compute.manager [req-6ec8d57d-7c19-42be-9169-9ebdeb9c3854 req-2eefe3b9-0e0e-419b-af58-bb8f14f57526 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Received unexpected event network-vif-plugged-1b1c3792-b109-4ead-81ff-2d275ce2dbc7 for instance with vm_state building and task_state spawning. 
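The 'Acquiring lock "..." by "..."' / 'acquired ... :: waited' / '"released" ... :: held' entries throughout this section are emitted by oslo.concurrency's lockutils wrappers: the decorator form logs from its inner wrapper (lockutils.py:405/410/424), while the context-manager form logs from lock() (lockutils.py:313/316/334), as with the refresh_cache-<uuid> pairs above. A minimal sketch of the two forms, with placeholder lock names and bodies; this is not code from Nova itself:

from oslo_concurrency import lockutils

# Decorator form: logs acquisition, wait time and hold time around the wrapped
# call, as seen for _locked_do_build_and_run_instance and instance_claim above.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # placeholder for the critical section

# Context-manager form: the refresh_cache-<uuid> acquire/release pairs follow
# this shape; the lock name here is illustrative only.
with lockutils.lock('refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97'):
    pass  # placeholder: rebuild the instance network info cache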
[ 768.758964] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.771886] env[65758]: DEBUG nova.compute.manager [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 768.873799] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d5f62f-4236-697f-c81d-657d3e58c15b, 'name': SearchDatastore_Task, 'duration_secs': 0.013653} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.874666] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bd37737-f013-4b9b-b536-9094288f0ba4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.882411] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 768.882411] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5269278e-6b62-9d10-873d-120f477b7941" [ 768.882411] env[65758]: _type = "Task" [ 768.882411] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.900826] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5269278e-6b62-9d10-873d-120f477b7941, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.901154] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.901884] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 768.902080] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-acbd58ee-6232-446f-9f9a-43e5424eeaf4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.906859] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b7e1ee-8f1e-484b-8cab-2d32dd23b89e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.917894] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 768.917894] env[65758]: value = "task-4660346" [ 768.917894] env[65758]: _type = "Task" [ 768.917894] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.925478] env[65758]: DEBUG oslo_vmware.api [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660345, 'name': PowerOffVM_Task, 'duration_secs': 0.263926} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.930041] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 768.930275] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 768.931438] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426adda6-1c0f-492c-bc50-b23695184544 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.934757] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12618e1d-f148-4873-a903-07ed4803ac34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.943802] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660346, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.973575] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe40f68-a575-4e95-9e27-81938b043f28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.983468] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c29a6f-eeb5-4fa4-be0b-37e6db6a24a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.999760] env[65758]: DEBUG nova.compute.provider_tree [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.027268] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 769.027428] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 769.027593] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 
tempest-ServerMetadataTestJSON-2086049374-project-member] Deleting the datastore file [datastore1] fb379346-f17a-4433-bb55-2b72025e9a61 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 769.027899] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c0cd1a3-6383-4481-b559-73419615c543 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.036771] env[65758]: DEBUG oslo_vmware.api [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for the task: (returnval){ [ 769.036771] env[65758]: value = "task-4660348" [ 769.036771] env[65758]: _type = "Task" [ 769.036771] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.048898] env[65758]: DEBUG oslo_vmware.api [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660348, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.109302] env[65758]: WARNING openstack [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 769.109675] env[65758]: WARNING openstack [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 769.168999] env[65758]: DEBUG nova.network.neutron [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 769.250375] env[65758]: WARNING neutronclient.v2_0.client [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 769.251049] env[65758]: WARNING openstack [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 769.251448] env[65758]: WARNING openstack [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 769.348425] env[65758]: DEBUG nova.network.neutron [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Updating instance_info_cache with network_info: [{"id": "1b1c3792-b109-4ead-81ff-2d275ce2dbc7", "address": "fa:16:3e:83:4f:26", "network": {"id": "f0e3ba82-1389-4626-ac32-5f202383ce8a", "bridge": "br-int", "label": "tempest-ServersTestJSON-50832280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "296e50c9805843949e592a0ab985d3a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1c3792-b1", "ovs_interfaceid": "1b1c3792-b109-4ead-81ff-2d275ce2dbc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 769.437284] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660346, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.480138] env[65758]: DEBUG nova.compute.manager [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 769.481154] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8f70cb-8007-45c1-95c3-3f55001fae39 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.505716] env[65758]: DEBUG nova.scheduler.client.report [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.550712] env[65758]: DEBUG oslo_vmware.api [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Task: {'id': task-4660348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.487544} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.551205] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 769.551430] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 769.551682] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 769.552023] env[65758]: INFO nova.compute.manager [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 769.552432] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 769.552696] env[65758]: DEBUG nova.compute.manager [-] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 769.553229] env[65758]: DEBUG nova.network.neutron [-] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 769.553538] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 769.554447] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 769.554897] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 769.597428] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 769.787109] env[65758]: DEBUG nova.compute.manager [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 769.816791] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 769.817070] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 769.817234] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 769.817421] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 769.817565] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 769.817715] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 769.817937] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 769.818110] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 769.818278] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba 
tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 769.818475] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 769.818639] env[65758]: DEBUG nova.virt.hardware [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 769.819637] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf84521e-b028-4f53-a35e-914d452011a8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.830170] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3b8e9b-13fd-4eed-bb2f-1b18a8b6a001 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.846513] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.852569] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Creating folder: Project (bdcf20f3b8fc4b5ab09f262074d9e354). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.853279] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Releasing lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.853634] env[65758]: DEBUG nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Instance network_info: |[{"id": "1b1c3792-b109-4ead-81ff-2d275ce2dbc7", "address": "fa:16:3e:83:4f:26", "network": {"id": "f0e3ba82-1389-4626-ac32-5f202383ce8a", "bridge": "br-int", "label": "tempest-ServersTestJSON-50832280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "296e50c9805843949e592a0ab985d3a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1c3792-b1", "ovs_interfaceid": "1b1c3792-b109-4ead-81ff-2d275ce2dbc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 769.853941] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7958c1d1-1c08-4c26-9480-0e6aa1fcb260 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.856132] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:4f:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60e7ee7b-4d02-4d68-af2e-5ab7d9708120', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b1c3792-b109-4ead-81ff-2d275ce2dbc7', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.864176] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Creating folder: Project (296e50c9805843949e592a0ab985d3a3). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.866325] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-321e4764-949c-4c49-9226-b5312d6da9d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.881997] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Created folder: Project (bdcf20f3b8fc4b5ab09f262074d9e354) in parent group-v909763. [ 769.882320] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Creating folder: Instances. Parent ref: group-v909893. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.882564] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da02f449-d31c-4c98-ab6b-11f0ebf0143f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.893554] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Created folder: Project (296e50c9805843949e592a0ab985d3a3) in parent group-v909763. [ 769.893826] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Creating folder: Instances. Parent ref: group-v909894. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.894116] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c73497c-5e95-4b7e-97cc-25ca34cfe64b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.899401] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Created folder: Instances in parent group-v909893. [ 769.899920] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 769.899920] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.900098] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a9fa527-ce23-423c-aa28-198929e11d56 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.915651] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Created folder: Instances in parent group-v909894. 
[ 769.916137] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 769.916608] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.916902] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a79afe45-1111-4db4-b190-cecebbeabb3c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.934793] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.934793] env[65758]: value = "task-4660353" [ 769.934793] env[65758]: _type = "Task" [ 769.934793] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.944954] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.944954] env[65758]: value = "task-4660354" [ 769.944954] env[65758]: _type = "Task" [ 769.944954] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.955443] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660346, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.948488} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.955443] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660353, 'name': CreateVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.958752] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 769.958992] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.959343] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9688e58-cab8-4149-81ff-f0edb889820d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.968702] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660354, 'name': CreateVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.970901] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 769.970901] env[65758]: value = "task-4660355" [ 769.970901] env[65758]: _type = "Task" [ 769.970901] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.983140] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660355, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.997425] env[65758]: INFO nova.compute.manager [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] instance snapshotting [ 770.000747] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2846c1cd-5a4e-432c-a911-513842f69b72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.022094] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.261s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.025880] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.680s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.028689] env[65758]: INFO nova.compute.claims [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 770.034738] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0f9ba3-5bfa-4890-95ad-5371d50845b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.065114] env[65758]: INFO nova.scheduler.client.report [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted allocations for instance 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9 [ 770.102026] env[65758]: DEBUG nova.compute.manager [req-ed8f60d0-e52e-4ca5-8261-9b500e1a16d1 req-6d66202d-d918-4a68-a9fa-c3e87b6135df service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Received event network-vif-deleted-0510cb2b-8be8-482a-83c4-9743bb78efc1 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 
770.102560] env[65758]: INFO nova.compute.manager [req-ed8f60d0-e52e-4ca5-8261-9b500e1a16d1 req-6d66202d-d918-4a68-a9fa-c3e87b6135df service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Neutron deleted interface 0510cb2b-8be8-482a-83c4-9743bb78efc1; detaching it from the instance and deleting it from the info cache [ 770.103019] env[65758]: DEBUG nova.network.neutron [req-ed8f60d0-e52e-4ca5-8261-9b500e1a16d1 req-6d66202d-d918-4a68-a9fa-c3e87b6135df service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 770.372245] env[65758]: DEBUG nova.network.neutron [-] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 770.451719] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660353, 'name': CreateVM_Task, 'duration_secs': 0.370876} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.452094] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.452710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.452890] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.453443] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.453678] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2603f1f-7733-458b-a3c0-fef492fed496 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.465041] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 770.465041] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f2bb88-2158-137a-05d2-b8b528a753a9" [ 770.465041] env[65758]: _type = "Task" [ 770.465041] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.468857] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660354, 'name': CreateVM_Task, 'duration_secs': 0.49204} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.476358] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.479730] env[65758]: WARNING neutronclient.v2_0.client [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 770.480206] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.491401] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f2bb88-2158-137a-05d2-b8b528a753a9, 'name': SearchDatastore_Task, 'duration_secs': 0.019303} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.494954] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.495222] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 770.495576] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.495664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.495768] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 770.496107] env[65758]: DEBUG oslo_vmware.api 
[None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125777} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.496322] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.496630] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.496866] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85470798-322c-433b-894b-fa209acbec88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.501222] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.501222] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf37c578-cf3e-4911-badc-1f21960b0591 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.502382] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558dab78-67d7-4928-a07f-6a3eb051d489 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.512494] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 770.512494] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5284cd50-6b2c-59c0-d0c0-8069871f3395" [ 770.512494] env[65758]: _type = "Task" [ 770.512494] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.546653] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.557635] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5dcb9dcc-7da0-4586-95e8-6cb54e107f59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.575376] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 770.575584] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 770.578269] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 770.581097] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26360733-3d59-48ba-a573-ab869b37f3b8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.584542] env[65758]: DEBUG oslo_concurrency.lockutils [None req-99fc5ab6-ec47-4d4f-90a4-3eceffeec66b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "8a7f1d79-97ac-4503-a4ed-c99e4f6718c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.105s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.586403] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-72fcc286-6b84-4ae6-892a-77ffe9626c87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.602286] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 770.602286] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52413f83-0792-aaed-0573-752bbb9d5a69" [ 770.602286] env[65758]: _type = "Task" [ 770.602286] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.604303] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5284cd50-6b2c-59c0-d0c0-8069871f3395, 'name': SearchDatastore_Task, 'duration_secs': 0.017466} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.606383] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.606674] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 770.606943] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.607490] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 770.607490] env[65758]: value = "task-4660358" [ 770.607490] env[65758]: _type = "Task" [ 770.607490] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.608173] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 770.608173] env[65758]: value = "task-4660357" [ 770.608173] env[65758]: _type = "Task" [ 770.608173] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.611911] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad16ad71-0604-4468-8c18-b6ad45fe526c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.630664] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660358, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.640384] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660357, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.640752] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52413f83-0792-aaed-0573-752bbb9d5a69, 'name': SearchDatastore_Task, 'duration_secs': 0.015743} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.647028] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd13f4f-edc4-4c9e-9f19-432653e4a746 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.659345] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10371749-47dc-4d8d-8649-6fac204becbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.666604] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 770.666604] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52339902-46a2-a76e-e935-2274f67532e8" [ 770.666604] env[65758]: _type = "Task" [ 770.666604] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.691110] env[65758]: DEBUG nova.compute.manager [req-ed8f60d0-e52e-4ca5-8261-9b500e1a16d1 req-6d66202d-d918-4a68-a9fa-c3e87b6135df service nova] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Detach interface failed, port_id=0510cb2b-8be8-482a-83c4-9743bb78efc1, reason: Instance fb379346-f17a-4433-bb55-2b72025e9a61 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 770.695029] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52339902-46a2-a76e-e935-2274f67532e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.875368] env[65758]: INFO nova.compute.manager [-] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Took 1.32 seconds to deallocate network for instance. 
[ 770.926371] env[65758]: DEBUG nova.compute.manager [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Received event network-changed-1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 770.926951] env[65758]: DEBUG nova.compute.manager [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Refreshing instance network info cache due to event network-changed-1b1c3792-b109-4ead-81ff-2d275ce2dbc7. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 770.927441] env[65758]: DEBUG oslo_concurrency.lockutils [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] Acquiring lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.927749] env[65758]: DEBUG oslo_concurrency.lockutils [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] Acquired lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.928804] env[65758]: DEBUG nova.network.neutron [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Refreshing network info cache for port 1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 771.139958] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660357, 'name': ReconfigVM_Task, 'duration_secs': 0.363472} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.139958] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660358, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.140182] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf/2d787237-26e5-4519-9f6e-1d30b9d016cf.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.140767] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5211a210-2663-4cbd-a717-dca71954b6de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.148892] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 771.148892] env[65758]: value = "task-4660359" [ 771.148892] env[65758]: _type = "Task" [ 771.148892] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.164071] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660359, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.179959] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52339902-46a2-a76e-e935-2274f67532e8, 'name': SearchDatastore_Task, 'duration_secs': 0.015387} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.180423] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.180602] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 5e54e7f4-3df1-4283-bee1-a7e475051a24/5e54e7f4-3df1-4283-bee1-a7e475051a24.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.180903] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.182274] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.182274] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dff275b-cbab-47b1-9450-470471301d03 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.186951] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc61ba84-4a7a-472e-80fe-ba658317e484 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.195685] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 771.195685] env[65758]: value = "task-4660360" [ 771.195685] env[65758]: _type = "Task" [ 771.195685] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.201573] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.201774] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.205674] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae9aa465-a03c-46ce-8cfb-86ddbbe46033 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.214222] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.219789] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 771.219789] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b9249e-ac3e-808a-60db-97d27233a067" [ 771.219789] env[65758]: _type = "Task" [ 771.219789] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.233018] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b9249e-ac3e-808a-60db-97d27233a067, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.386193] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.432170] env[65758]: WARNING neutronclient.v2_0.client [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 771.433053] env[65758]: WARNING openstack [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 771.433053] env[65758]: WARNING openstack [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 771.626402] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660358, 'name': CreateSnapshot_Task, 'duration_secs': 0.661725} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.629501] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 771.630791] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ff9e91-8556-475a-ac0a-7762958b0650 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.665385] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660359, 'name': Rename_Task, 'duration_secs': 0.170401} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.666498] env[65758]: WARNING neutronclient.v2_0.client [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 771.667393] env[65758]: WARNING openstack [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 771.667912] env[65758]: WARNING openstack [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 771.678215] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.681443] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-221edb71-afe2-45f2-a77b-e6b3eac38879 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.695371] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 771.695371] env[65758]: value = "task-4660361" [ 771.695371] env[65758]: _type = "Task" [ 771.695371] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.707907] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660360, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.711764] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660361, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.733064] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b9249e-ac3e-808a-60db-97d27233a067, 'name': SearchDatastore_Task, 'duration_secs': 0.017689} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.734942] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0661e7d5-4eeb-4846-8a33-12baec8c0689 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.742603] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 771.742603] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d2dd6c-6ab4-f599-5030-0af5e6187706" [ 771.742603] env[65758]: _type = "Task" [ 771.742603] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.760651] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d2dd6c-6ab4-f599-5030-0af5e6187706, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.782041] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43bee8e0-1ab9-455e-bb85-46ef436261dc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.795020] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30022119-d870-43b8-bea4-bed7d8b6cbad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.836636] env[65758]: DEBUG nova.network.neutron [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Updated VIF entry in instance network info cache for port 1b1c3792-b109-4ead-81ff-2d275ce2dbc7. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 771.837022] env[65758]: DEBUG nova.network.neutron [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Updating instance_info_cache with network_info: [{"id": "1b1c3792-b109-4ead-81ff-2d275ce2dbc7", "address": "fa:16:3e:83:4f:26", "network": {"id": "f0e3ba82-1389-4626-ac32-5f202383ce8a", "bridge": "br-int", "label": "tempest-ServersTestJSON-50832280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "296e50c9805843949e592a0ab985d3a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1c3792-b1", "ovs_interfaceid": "1b1c3792-b109-4ead-81ff-2d275ce2dbc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 771.839166] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3c1c05-1586-4227-aa67-d9bc50cd7e36 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.849790] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c578010-d5a6-45e5-87ca-6278c7a7dad8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.871319] env[65758]: DEBUG nova.compute.provider_tree [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.158872] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 772.159357] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dd50361d-66d9-4e20-876f-553fd3a1ee21 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.169792] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 772.169792] env[65758]: value = "task-4660363" [ 772.169792] env[65758]: _type = "Task" [ 772.169792] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.180051] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660363, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.209650] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.856286} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.212848] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 5e54e7f4-3df1-4283-bee1-a7e475051a24/5e54e7f4-3df1-4283-bee1-a7e475051a24.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.213048] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.213337] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660361, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.213562] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2a1a963-776d-4627-b844-f38644352e08 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.221216] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 772.221216] env[65758]: value = "task-4660364" [ 772.221216] env[65758]: _type = "Task" [ 772.221216] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.231972] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660364, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.259207] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d2dd6c-6ab4-f599-5030-0af5e6187706, 'name': SearchDatastore_Task, 'duration_secs': 0.070617} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.260659] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.261095] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 47bb5b02-4f84-468e-ad46-2c1c96b65c97/47bb5b02-4f84-468e-ad46-2c1c96b65c97.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 772.261490] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca6642de-7610-41ac-bff4-844755f872df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.271601] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 772.271601] env[65758]: value = "task-4660365" [ 772.271601] env[65758]: _type = "Task" [ 772.271601] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.285051] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660365, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.344892] env[65758]: DEBUG oslo_concurrency.lockutils [req-7d37347a-1431-4079-858b-39c0fc86b9eb req-2e06a034-eecc-4e95-94c3-779442cae5a5 service nova] Releasing lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.375627] env[65758]: DEBUG nova.scheduler.client.report [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 772.445062] env[65758]: DEBUG oslo_concurrency.lockutils [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "e6159a35-f073-4931-b0b0-832a88680356" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.445223] env[65758]: DEBUG oslo_concurrency.lockutils [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.445468] env[65758]: DEBUG nova.compute.manager [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 772.446463] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b91e05-cab6-499b-ae3e-d7ee0b03c83f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.456716] env[65758]: DEBUG nova.compute.manager [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 772.457903] env[65758]: DEBUG nova.objects.instance [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'flavor' on Instance uuid e6159a35-f073-4931-b0b0-832a88680356 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 772.688974] env[65758]: DEBUG oslo_vmware.api [None
req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660363, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.706507] env[65758]: DEBUG oslo_vmware.api [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660361, 'name': PowerOnVM_Task, 'duration_secs': 0.936628} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.706968] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.707281] env[65758]: DEBUG nova.compute.manager [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 772.708245] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d4dac3-a819-4932-864d-fdcbea452615 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.733901] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660364, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082679} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.734232] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 772.735067] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b549ab7b-7fa4-4b21-9ae8-da5e81b174dc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.759856] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 5e54e7f4-3df1-4283-bee1-a7e475051a24/5e54e7f4-3df1-4283-bee1-a7e475051a24.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 772.760384] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c865eedf-6336-4a85-a572-05a0835e5958 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.794585] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660365, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.796690] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 772.796690] env[65758]: value = "task-4660366" [ 772.796690] env[65758]: _type = "Task" [ 772.796690] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.806906] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660366, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.882220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.856s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.883083] env[65758]: DEBUG nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 772.888038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.850s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.889068] env[65758]: DEBUG nova.objects.instance [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lazy-loading 'resources' on Instance uuid 0addcbb1-3561-4c93-b714-37e6b613b962 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 773.182423] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660363, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.228302] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.290055] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660365, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.811983} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.290348] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 47bb5b02-4f84-468e-ad46-2c1c96b65c97/47bb5b02-4f84-468e-ad46-2c1c96b65c97.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.290635] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.290918] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3afab2ba-99e6-4f3a-abc4-d20a48f1092a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.305118] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 773.305118] env[65758]: value = "task-4660367" [ 773.305118] env[65758]: _type = "Task" [ 773.305118] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.313477] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660366, 'name': ReconfigVM_Task, 'duration_secs': 0.383411} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.315743] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 5e54e7f4-3df1-4283-bee1-a7e475051a24/5e54e7f4-3df1-4283-bee1-a7e475051a24.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 773.315743] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4230961-0a35-4530-9f83-c21bb208e797 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.320261] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660367, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.325526] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 773.325526] env[65758]: value = "task-4660368" [ 773.325526] env[65758]: _type = "Task" [ 773.325526] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.335330] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660368, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.393035] env[65758]: DEBUG nova.compute.utils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 773.409329] env[65758]: DEBUG nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 773.409329] env[65758]: DEBUG nova.network.neutron [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 773.409329] env[65758]: WARNING neutronclient.v2_0.client [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 773.409329] env[65758]: WARNING neutronclient.v2_0.client [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 773.409329] env[65758]: WARNING openstack [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 773.409329] env[65758]: WARNING openstack [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 773.467569] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 773.468021] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-789d3df9-778c-46de-8113-4cd4b149840f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.480398] env[65758]: DEBUG oslo_vmware.api [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 773.480398] env[65758]: value = "task-4660369" [ 773.480398] env[65758]: _type = "Task" [ 773.480398] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.491797] env[65758]: DEBUG nova.policy [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f16c6fa73284e8696df370f862e6366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdaabf2897064b5a948dbdb6d5921d76', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 773.497818] env[65758]: DEBUG oslo_vmware.api [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660369, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.686210] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660363, 'name': CloneVM_Task} progress is 95%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.820738] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660367, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13842} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.821151] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 773.822030] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99816242-0762-40dc-ad5c-50f0b8f5097d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.854653] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 47bb5b02-4f84-468e-ad46-2c1c96b65c97/47bb5b02-4f84-468e-ad46-2c1c96b65c97.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.860070] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb02f121-ee5e-42fe-a607-f3d60b881bf7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.883757] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660368, 'name': Rename_Task, 'duration_secs': 0.154691} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.884554] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 773.885901] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e51d0d1f-b3cd-4775-a2be-016ecec5e403 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.892758] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 773.892758] env[65758]: value = "task-4660370" [ 773.892758] env[65758]: _type = "Task" [ 773.892758] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.894423] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 773.894423] env[65758]: value = "task-4660371" [ 773.894423] env[65758]: _type = "Task" [ 773.894423] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.905808] env[65758]: DEBUG nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 773.908521] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660370, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.912286] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660371, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.950070] env[65758]: DEBUG nova.network.neutron [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Successfully created port: ea073371-1ad8-47ae-9cca-67a419a8e219 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 773.992342] env[65758]: DEBUG oslo_vmware.api [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660369, 'name': PowerOffVM_Task, 'duration_secs': 0.216114} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.992342] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 773.992459] env[65758]: DEBUG nova.compute.manager [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 773.993231] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174361ff-1b40-46d6-a4d6-ebf52f31b7ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.092646] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e54793-8ed8-46fd-947b-baf054e50ce0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.103505] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89737a2-27ca-4b19-82d1-d76d1b55b2a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.137158] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb229a2d-79e9-4ae9-9ee0-361976b6daaa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.146380] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c02be9-4964-41df-b9d3-299f69d70539 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.162273] env[65758]: DEBUG nova.compute.provider_tree [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.185089] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660363, 'name': CloneVM_Task, 'duration_secs': 1.710898} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.185429] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Created linked-clone VM from snapshot [ 774.186304] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22ce27c-481b-4461-9160-1d92606aefa8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.196132] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Uploading image 7cbb6a07-ab2a-4f4a-8545-e782d07c7e15 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 774.227707] env[65758]: DEBUG oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 774.227707] env[65758]: value = "vm-909900" [ 774.227707] env[65758]: _type = "VirtualMachine" [ 774.227707] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 774.228098] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cbd81466-50fa-402e-81ae-634f7be3ccb7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.237842] env[65758]: DEBUG oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lease: (returnval){ [ 774.237842] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c62d61-1234-b38a-177c-be89c9ed47d7" [ 774.237842] env[65758]: _type = "HttpNfcLease" [ 774.237842] env[65758]: } obtained for exporting VM: (result){ [ 774.237842] env[65758]: value = "vm-909900" [ 774.237842] env[65758]: _type = "VirtualMachine" [ 774.237842] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 774.238184] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the lease: (returnval){ [ 774.238184] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c62d61-1234-b38a-177c-be89c9ed47d7" [ 774.238184] env[65758]: _type = "HttpNfcLease" [ 774.238184] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 774.246558] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 774.246558] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c62d61-1234-b38a-177c-be89c9ed47d7" [ 774.246558] env[65758]: _type = "HttpNfcLease" [ 774.246558] env[65758]: } is initializing. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 774.408097] env[65758]: DEBUG oslo_vmware.api [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660371, 'name': PowerOnVM_Task, 'duration_secs': 0.458102} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.412207] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 774.412635] env[65758]: INFO nova.compute.manager [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Took 4.63 seconds to spawn the instance on the hypervisor. [ 774.412833] env[65758]: DEBUG nova.compute.manager [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 774.413171] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660370, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.417322] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e060b3f-99f8-43a5-b351-24b66cb71b50 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.512501] env[65758]: DEBUG oslo_concurrency.lockutils [None req-42a0079b-c08c-4e6e-b005-1bbd9d55b886 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.066s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.667408] env[65758]: DEBUG nova.scheduler.client.report [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.756034] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 774.756034] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c62d61-1234-b38a-177c-be89c9ed47d7" [ 774.756034] env[65758]: _type = "HttpNfcLease" [ 
774.756034] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 774.756034] env[65758]: DEBUG oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 774.756034] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c62d61-1234-b38a-177c-be89c9ed47d7" [ 774.756034] env[65758]: _type = "HttpNfcLease" [ 774.756034] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 774.756034] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd8354e-8426-416d-b8d4-e2a102d66c6c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.765832] env[65758]: DEBUG oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc2f93-7d8e-dca4-44d6-c605f50ed68f/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 774.766059] env[65758]: DEBUG oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc2f93-7d8e-dca4-44d6-c605f50ed68f/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 774.903781] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660370, 'name': ReconfigVM_Task, 'duration_secs': 0.610428} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.904127] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 47bb5b02-4f84-468e-ad46-2c1c96b65c97/47bb5b02-4f84-468e-ad46-2c1c96b65c97.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.904939] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-933d67de-e004-4397-8838-ff913a61f418 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.913863] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 774.913863] env[65758]: value = "task-4660373" [ 774.913863] env[65758]: _type = "Task" [ 774.913863] env[65758]: } to complete. 
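The export path traced above follows a fixed sequence: create an HttpNfcLease with ExportVm, poll the lease until it leaves "initializing" and becomes "ready", read the VMDK URL out of the lease info, then stream the disk over HTTPS while periodically reporting lease progress. A rough sketch of that sequence, assuming a hypothetical vim wrapper object (the real code lives in oslo_vmware.rw_handles):

import time
import urllib.request

def export_vmdk(vim, vm_ref, dest_path, poll_interval=1.0):
    """Sketch of the lease-based export flow traced above; every vim.* call
    here is a hypothetical wrapper, not the oslo.vmware API."""
    lease = vim.export_vm(vm_ref)                  # ExportVm -> HttpNfcLease
    while vim.lease_state(lease) == 'initializing':
        time.sleep(poll_interval)                  # "Lease ... is initializing."
    if vim.lease_state(lease) != 'ready':
        raise RuntimeError('HttpNfcLease did not become ready')
    info = vim.lease_info(lease)                   # "reading info of lease"
    vmdk_url = next(u for u in info['device_urls'] if u.endswith('.vmdk'))
    with urllib.request.urlopen(vmdk_url) as src, open(dest_path, 'wb') as dst:
        while chunk := src.read(64 * 1024):        # "Opening URL ... for reading."
            dst.write(chunk)
            vim.lease_progress(lease, 50)          # keep the lease alive
    vim.lease_complete(lease)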
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.922096] env[65758]: DEBUG nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 774.925214] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-58679134-eb59-4399-bdf6-8897627bc7e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.936676] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660373, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.937973] env[65758]: INFO nova.compute.manager [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Took 48.14 seconds to build instance. [ 774.969965] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:16:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='346c523a-8d39-4f4e-a2d8-eb4e1ab4f9a4',id=28,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1141065059',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 774.970234] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 774.970387] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 774.970563] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 774.973283] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 
tempest-MigrationsAdminTest-1268559466-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 774.973283] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 774.973283] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 774.973283] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 774.973283] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 774.973283] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 774.973283] env[65758]: DEBUG nova.virt.hardware [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 774.974151] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b6806d-6070-420f-a5a8-e739a3d2f4c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.987988] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9d0c12-51d8-4fcd-aac2-b811a8fc2ecc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.176723] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.289s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.179574] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.386s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.181393] env[65758]: INFO nova.compute.claims [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.218514] env[65758]: INFO nova.scheduler.client.report [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Deleted allocations for instance 0addcbb1-3561-4c93-b714-37e6b613b962 [ 775.425936] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660373, 'name': Rename_Task, 'duration_secs': 0.21972} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.426433] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.427555] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f4f46ff-b73e-48fb-b5df-629e52bad8fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.439304] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 775.439304] env[65758]: value = "task-4660374" [ 775.439304] env[65758]: _type = "Task" [ 775.439304] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.439998] env[65758]: DEBUG oslo_concurrency.lockutils [None req-13d3492a-7dde-4c88-a3cc-df6a0ce37cba tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "5e54e7f4-3df1-4283-bee1-a7e475051a24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.961s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.457092] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660374, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.649258] env[65758]: DEBUG nova.network.neutron [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Successfully updated port: ea073371-1ad8-47ae-9cca-67a419a8e219 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 775.734327] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69fdf4c2-5b40-427b-81d8-8094c9325049 tempest-ServersTestFqdnHostnames-1356736416 tempest-ServersTestFqdnHostnames-1356736416-project-member] Lock "0addcbb1-3561-4c93-b714-37e6b613b962" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.814s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.951200] env[65758]: DEBUG nova.compute.manager [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 775.954499] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660374, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.153977] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.153977] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.154296] env[65758]: DEBUG nova.network.neutron [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 776.458990] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660374, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.477741] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.542029] env[65758]: DEBUG nova.compute.manager [req-82e545b3-a36b-4601-8442-90c324ca08c5 req-fbad3d23-c38b-4c30-8850-783f0df20cec service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Received event network-vif-plugged-ea073371-1ad8-47ae-9cca-67a419a8e219 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 776.542475] env[65758]: DEBUG oslo_concurrency.lockutils [req-82e545b3-a36b-4601-8442-90c324ca08c5 req-fbad3d23-c38b-4c30-8850-783f0df20cec service nova] Acquiring lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.542577] env[65758]: DEBUG oslo_concurrency.lockutils [req-82e545b3-a36b-4601-8442-90c324ca08c5 req-fbad3d23-c38b-4c30-8850-783f0df20cec service nova] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.542758] env[65758]: DEBUG oslo_concurrency.lockutils [req-82e545b3-a36b-4601-8442-90c324ca08c5 req-fbad3d23-c38b-4c30-8850-783f0df20cec service nova] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.542979] env[65758]: DEBUG nova.compute.manager [req-82e545b3-a36b-4601-8442-90c324ca08c5 req-fbad3d23-c38b-4c30-8850-783f0df20cec service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] No waiting events found dispatching network-vif-plugged-ea073371-1ad8-47ae-9cca-67a419a8e219 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 776.543185] env[65758]: WARNING nova.compute.manager [req-82e545b3-a36b-4601-8442-90c324ca08c5 req-fbad3d23-c38b-4c30-8850-783f0df20cec service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Received unexpected event network-vif-plugged-ea073371-1ad8-47ae-9cca-67a419a8e219 for instance with vm_state building and task_state spawning. 
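The network-vif-plugged handling above shows the usual dispatch pattern: the compute manager registers the events it intends to wait for, and when the Neutron notification arrives it pops the matching waiter or, as in this case, logs the event as unexpected because nothing had registered for it yet. A toy sketch of that pop-or-warn dispatch, with hypothetical data structures:

import logging
import threading

LOG = logging.getLogger(__name__)

class InstanceEvents:
    """Toy pop-or-warn dispatcher for external events such as
    network-vif-plugged-<port-id> in the record above."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}          # (instance_uuid, event_name) -> Event

    def prepare(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter               # the caller blocks on waiter.wait()

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            LOG.warning('Received unexpected event %s for instance %s',
                        event_name, instance_uuid)
            return
        waiter.set()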
[ 776.657292] env[65758]: WARNING openstack [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 776.658765] env[65758]: WARNING openstack [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 776.724663] env[65758]: DEBUG nova.network.neutron [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 776.797209] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77eb12d5-fd65-40cd-848b-5f18f882f859 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.809368] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c56e282-cd09-461a-89c9-4fdfd4f9742c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.860232] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "54db018a-d54c-4fe5-9a6e-600e801e00b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.860232] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "54db018a-d54c-4fe5-9a6e-600e801e00b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.860232] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "54db018a-d54c-4fe5-9a6e-600e801e00b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.860418] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "54db018a-d54c-4fe5-9a6e-600e801e00b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.861058] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "54db018a-d54c-4fe5-9a6e-600e801e00b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.865224] env[65758]: WARNING neutronclient.v2_0.client [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 776.866265] env[65758]: WARNING openstack [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 776.867344] env[65758]: WARNING openstack [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 776.879343] env[65758]: INFO nova.compute.manager [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Terminating instance [ 776.882249] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02329a8-964e-4ee6-8dbd-7d2c03e95fce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.897276] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cdf1b3-eec7-4c2d-898b-625a29309b98 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.916909] env[65758]: DEBUG nova.compute.provider_tree [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.941321] env[65758]: DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Acquiring lock "d42d0818-1486-4696-9871-2cf989aeb885" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.941607] env[65758]: 
DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "d42d0818-1486-4696-9871-2cf989aeb885" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.956712] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660374, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.005686] env[65758]: DEBUG nova.network.neutron [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance_info_cache with network_info: [{"id": "ea073371-1ad8-47ae-9cca-67a419a8e219", "address": "fa:16:3e:e4:10:d3", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea073371-1a", "ovs_interfaceid": "ea073371-1ad8-47ae-9cca-67a419a8e219", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 777.023670] env[65758]: DEBUG nova.objects.instance [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'flavor' on Instance uuid e6159a35-f073-4931-b0b0-832a88680356 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.102697] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529d82a6-89e7-cc0c-458c-d2dd209bc33c/disk-0.vmdk. 
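The instance_info_cache entry logged above is a list of VIF dictionaries; everything the driver needs (port id, MAC, fixed IPs, MTU, the OVS interface id) sits a few levels down. A small helper that pulls those fields out of an entry shaped like the one above (illustrative only; Nova wraps this in nova.network.model rather than raw dicts):

def summarize_vif(vif):
    """Flatten one network_info entry shaped like the record above."""
    subnets = vif['network']['subnets']
    return {
        'port_id': vif['id'],
        'mac': vif['address'],
        'fixed_ips': [ip['address'] for subnet in subnets for ip in subnet['ips']],
        'mtu': vif['network']['meta'].get('mtu'),
        'ovs_interfaceid': vif.get('ovs_interfaceid'),
    }

# For the entry above this yields port ea073371-1ad8-47ae-9cca-67a419a8e219,
# MAC fa:16:3e:e4:10:d3, fixed IP 192.168.233.114 and MTU 8950.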
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 777.104090] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85edf32-455b-48c7-9af0-90d16a81accb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.113694] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529d82a6-89e7-cc0c-458c-d2dd209bc33c/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 777.114057] env[65758]: ERROR oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529d82a6-89e7-cc0c-458c-d2dd209bc33c/disk-0.vmdk due to incomplete transfer. [ 777.114434] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bf63ad48-cdfc-4a13-930c-414807ef45ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.126216] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529d82a6-89e7-cc0c-458c-d2dd209bc33c/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 777.126949] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Uploaded image 332f38a3-c164-47f7-a225-3e0f0c16aabe to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 777.129638] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 777.130126] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-967dd33c-2455-48bc-bd25-0e7ea2c187e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.140475] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 777.140475] env[65758]: value = "task-4660375" [ 777.140475] env[65758]: _type = "Task" [ 777.140475] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.153751] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660375, 'name': Destroy_Task} progress is 0%. 
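The ERROR line above is the normal cleanup path when a VMDK read handle is closed before the whole disk was streamed: if every expected byte was transferred the lease is completed, otherwise it is aborted so vCenter releases the export. A hedged sketch of that close-time decision (HttpNfcLeaseComplete/HttpNfcLeaseAbort are the vSphere operations; the lease object and handle here are hypothetical stand-ins):

class VmdkReadHandle:
    """Toy close() that mirrors the complete-or-abort decision above."""

    def __init__(self, lease, expected_bytes):
        self._lease = lease
        self._expected = expected_bytes
        self._transferred = 0

    def account(self, nbytes):
        self._transferred += nbytes

    def close(self):
        if self._transferred >= self._expected:
            self._lease.complete()      # HttpNfcLeaseComplete: transfer finished
        else:
            # "Aborting lease ... due to incomplete transfer."
            self._lease.abort()         # HttpNfcLeaseAbort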
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.390754] env[65758]: DEBUG nova.compute.manager [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 777.391405] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 777.392047] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d084f3c-a3b7-4101-83f1-0ab1148d67b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.401343] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 777.401619] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6460b8f3-76a7-450a-a84a-6b3861c6a068 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.411287] env[65758]: DEBUG oslo_vmware.api [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 777.411287] env[65758]: value = "task-4660376" [ 777.411287] env[65758]: _type = "Task" [ 777.411287] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.420348] env[65758]: DEBUG nova.scheduler.client.report [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.428804] env[65758]: DEBUG oslo_vmware.api [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660376, 'name': PowerOffVM_Task} progress is 0%. 
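The inventory dictionary repeated above is what bounds scheduling onto this node: usable capacity for each resource class is (total - reserved) * allocation_ratio, with max_unit capping any single instance. A quick check with the values from the trace (48 vCPUs at ratio 4.0, 196590 MB RAM minus 512 reserved):

def schedulable_capacity(total, reserved, allocation_ratio):
    """Usable capacity for one resource class as Placement computes it."""
    return int((total - reserved) * allocation_ratio)

print(schedulable_capacity(48, 0, 4.0))        # 192 VCPU
print(schedulable_capacity(196590, 512, 1.0))  # 196078 MEMORY_MB
print(schedulable_capacity(200, 0, 1.0))       # 200 DISK_GB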
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.457108] env[65758]: DEBUG oslo_vmware.api [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660374, 'name': PowerOnVM_Task, 'duration_secs': 1.828487} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.457108] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.457108] env[65758]: INFO nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Took 9.95 seconds to spawn the instance on the hypervisor. [ 777.457108] env[65758]: DEBUG nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 777.457761] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ada1975-99b3-4a4f-8baf-42c2b35076f2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.510481] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.510481] env[65758]: DEBUG nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Instance network_info: |[{"id": "ea073371-1ad8-47ae-9cca-67a419a8e219", "address": "fa:16:3e:e4:10:d3", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea073371-1a", "ovs_interfaceid": "ea073371-1ad8-47ae-9cca-67a419a8e219", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2035}} [ 777.510878] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:10:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea073371-1ad8-47ae-9cca-67a419a8e219', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.519061] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 777.519944] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.519944] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bced7aae-cf23-42ca-8151-c4e2d863b11a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.538942] env[65758]: DEBUG oslo_concurrency.lockutils [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.540040] env[65758]: DEBUG oslo_concurrency.lockutils [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.540040] env[65758]: DEBUG nova.network.neutron [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 777.540040] env[65758]: DEBUG nova.objects.instance [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'info_cache' on Instance uuid e6159a35-f073-4931-b0b0-832a88680356 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.545604] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.545604] env[65758]: value = "task-4660377" [ 777.545604] env[65758]: _type = "Task" [ 777.545604] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.555641] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660377, 'name': CreateVM_Task} progress is 0%. 
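build_virtual_machine above turns each Neutron VIF into the flat "VIF info" dict the VMware driver consumes: the NSX logical-switch id becomes an OpaqueNetwork reference and the Neutron port id rides along as iface_id. A rough sketch of that mapping for an entry shaped like the network_info logged above (illustrative; the real translation lives in nova.virt.vmwareapi):

def vmware_vif_info(vif, vif_model='vmxnet3'):
    """Map one Neutron VIF dict (shaped like the network_info above) to the
    flat VIF-info dict logged by build_virtual_machine."""
    return {
        'network_name': vif['network']['bridge'],               # 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }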
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.652081] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660375, 'name': Destroy_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.711560] env[65758]: INFO nova.compute.manager [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Rebuilding instance [ 777.761577] env[65758]: DEBUG nova.compute.manager [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 777.762786] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0b8f33-c0fa-4dad-807f-f35d3ee0909e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.923453] env[65758]: DEBUG oslo_vmware.api [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660376, 'name': PowerOffVM_Task, 'duration_secs': 0.34651} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.923708] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 777.923873] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 777.924161] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2add920-d6fc-4d20-80aa-bd0523b4ebaf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.930262] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.930894] env[65758]: DEBUG nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 777.934341] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.541s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.936558] env[65758]: INFO nova.compute.claims [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.983839] env[65758]: INFO nova.compute.manager [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Took 52.19 seconds to build instance. [ 778.013888] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 778.013888] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 778.013888] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleting the datastore file [datastore2] 54db018a-d54c-4fe5-9a6e-600e801e00b0 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 778.013888] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d3ca9e6-e0e3-4739-bb94-43c555872085 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.022570] env[65758]: DEBUG oslo_vmware.api [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 778.022570] env[65758]: value = "task-4660379" [ 778.022570] env[65758]: _type = "Task" [ 778.022570] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.034268] env[65758]: DEBUG oslo_vmware.api [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660379, 'name': DeleteDatastoreFile_Task} progress is 0%. 
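The teardown of instance 54db018a traced above follows the driver's fixed order: power off the VM, unregister it from vCenter, then delete the instance directory from the datastore, each vSphere task being polled like the others. A compact outline with hypothetical session helpers (not the vmwareapi driver's actual methods):

def destroy_instance(session, vm_ref, datastore_path):
    """Hypothetical outline of the teardown order above: power off,
    unregister, then delete the instance directory from the datastore."""
    session.wait_for_task(session.power_off_vm(vm_ref))       # PowerOffVM_Task
    session.unregister_vm(vm_ref)                              # UnregisterVM
    # DeleteDatastoreFile_Task removes '[datastore2] <instance uuid>'
    session.wait_for_task(session.delete_datastore_file(datastore_path))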
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.044491] env[65758]: DEBUG nova.objects.base [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 778.057678] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660377, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.099558] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.099846] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.154892] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660375, 'name': Destroy_Task, 'duration_secs': 0.640127} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.155184] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Destroyed the VM [ 778.155421] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 778.155680] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-325aca73-b557-4e59-b5d7-ee610b58dbba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.164746] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 778.164746] env[65758]: value = "task-4660380" [ 778.164746] env[65758]: _type = "Task" [ 778.164746] env[65758]: } to complete. 
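Every 'acquired ... :: waited Ns' / '"released" ... :: held Ns' pair in this trace, including the compute_resources and build_and_run_instance locks just above, comes from the lockutils wrapper timing how long a caller queued for a named lock and how long it then held it. A stdlib-only imitation of that bookkeeping (not oslo.concurrency itself):

import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_LOCKS = {}

@contextmanager
def named_lock(name):
    """Log waited/held durations in the style of the lockutils lines above."""
    lock = _LOCKS.setdefault(name, threading.Lock())
    queued_at = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - queued_at
    LOG.debug('Lock "%s" acquired :: waited %.3fs', name, waited)
    try:
        yield
    finally:
        held = time.monotonic() - queued_at - waited
        lock.release()
        LOG.debug('Lock "%s" "released" :: held %.3fs', name, held)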
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.175269] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660380, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.441225] env[65758]: DEBUG nova.compute.utils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 778.446493] env[65758]: DEBUG nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 778.446493] env[65758]: DEBUG nova.network.neutron [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 778.446493] env[65758]: WARNING neutronclient.v2_0.client [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 778.447140] env[65758]: WARNING neutronclient.v2_0.client [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
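"Allocating IP information in the background" above means the Neutron port creation is started asynchronously while the driver keeps preparing the guest, and the result is only joined when the VIFs are actually needed. A sketch of that pattern using a thread pool as a stand-in for Nova's eventlet-based helper:

from concurrent.futures import ThreadPoolExecutor

def build_instance(allocate_network, prepare_guest, plug_vifs):
    """Kick off network allocation in the background and join it only when
    the VIFs are needed (thread pool standing in for Nova's async helper)."""
    with ThreadPoolExecutor(max_workers=1) as pool:
        network_future = pool.submit(allocate_network)    # allocate_for_instance()
        prepare_guest()                                   # proceeds while Neutron works
        network_info = network_future.result()            # wait for the allocation
        plug_vifs(network_info)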
[ 778.447748] env[65758]: WARNING openstack [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.448339] env[65758]: WARNING openstack [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 778.488666] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4df4bece-9bb7-4bf7-a5fb-9ddfbe09e039 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.863s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.534564] env[65758]: DEBUG oslo_vmware.api [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165342} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.534564] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 778.534966] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 778.534966] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 778.535144] env[65758]: INFO nova.compute.manager [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 778.536109] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 778.536109] env[65758]: DEBUG nova.compute.manager [-] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 778.536109] env[65758]: DEBUG nova.network.neutron [-] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 778.536109] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 778.536530] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.536791] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 778.547425] env[65758]: WARNING neutronclient.v2_0.client [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 778.548651] env[65758]: WARNING openstack [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 778.548651] env[65758]: WARNING openstack [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 778.565658] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660377, 'name': CreateVM_Task, 'duration_secs': 0.572974} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.565807] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 778.566355] env[65758]: WARNING neutronclient.v2_0.client [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 778.566788] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.566995] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.567415] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 778.567656] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0b3ff2c-6d8a-4061-a054-b434aa88be7e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.573429] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 778.573429] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52886fc7-0468-5092-84d4-aa9b26518b6b" [ 778.573429] env[65758]: _type = "Task" [ 778.573429] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.583092] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52886fc7-0468-5092-84d4-aa9b26518b6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.676163] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660380, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.780026] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 778.780370] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b35d5a3-6f44-4b7b-9b88-8dd25d85f6f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.789631] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 778.789631] env[65758]: value = "task-4660381" [ 778.789631] env[65758]: _type = "Task" [ 778.789631] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.800832] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.963903] env[65758]: DEBUG nova.policy [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc35770d396c4518991ee34efb212fd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d90d1b4e23241798a6e7e14aa2ebc3d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 778.971312] env[65758]: DEBUG nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 778.993503] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 779.010813] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 779.091234] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52886fc7-0468-5092-84d4-aa9b26518b6b, 'name': SearchDatastore_Task, 'duration_secs': 0.010162} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.091633] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.091872] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 779.092166] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.092338] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.092521] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 779.092801] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfc1ff32-ec1b-4707-bceb-f79c6494a12a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.119629] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 779.119784] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 779.128339] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63e8a49b-ae26-4984-a8d3-1090f000d000 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.136685] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 779.136685] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52abcfa4-80de-c217-c9dc-3313f38c475d" [ 779.136685] env[65758]: _type = "Task" [ 779.136685] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.148085] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52abcfa4-80de-c217-c9dc-3313f38c475d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.177193] env[65758]: DEBUG oslo_vmware.api [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660380, 'name': RemoveSnapshot_Task, 'duration_secs': 0.997604} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.180286] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 779.180598] env[65758]: INFO nova.compute.manager [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Took 16.99 seconds to snapshot the instance on the hypervisor. [ 779.305387] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660381, 'name': PowerOffVM_Task, 'duration_secs': 0.1474} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.305520] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.305734] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 779.306595] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19d1541-c064-48dc-b250-22d73530986d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.315610] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 779.318362] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aaace342-6106-42de-96bb-f5f875dfbd0e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.353230] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 779.353230] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 779.353394] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Deleting the datastore file [datastore2] 5e54e7f4-3df1-4283-bee1-a7e475051a24 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 779.353714] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bd8d766-0265-4b1a-8243-5e40a821463d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.366361] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 779.366361] env[65758]: value = "task-4660383" [ 779.366361] env[65758]: _type = "Task" [ 779.366361] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.379556] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660383, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.406542] env[65758]: DEBUG nova.network.neutron [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Successfully created port: 6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 779.455736] env[65758]: DEBUG nova.compute.manager [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Received event network-changed-ea073371-1ad8-47ae-9cca-67a419a8e219 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 779.458074] env[65758]: DEBUG nova.compute.manager [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Refreshing instance network info cache due to event network-changed-ea073371-1ad8-47ae-9cca-67a419a8e219. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 779.458074] env[65758]: DEBUG oslo_concurrency.lockutils [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] Acquiring lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.458074] env[65758]: DEBUG oslo_concurrency.lockutils [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] Acquired lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.458458] env[65758]: DEBUG nova.network.neutron [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Refreshing network info cache for port ea073371-1ad8-47ae-9cca-67a419a8e219 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 779.526661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.630377] env[65758]: DEBUG oslo_concurrency.lockutils [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.630377] env[65758]: DEBUG oslo_concurrency.lockutils 
[None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.630377] env[65758]: DEBUG oslo_concurrency.lockutils [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.630377] env[65758]: DEBUG oslo_concurrency.lockutils [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.630377] env[65758]: DEBUG oslo_concurrency.lockutils [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.633496] env[65758]: INFO nova.compute.manager [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Terminating instance [ 779.662269] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52abcfa4-80de-c217-c9dc-3313f38c475d, 'name': SearchDatastore_Task, 'duration_secs': 0.012397} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.670627] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ee5aa79-6449-4408-9eff-c24da7222d15 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.686290] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 779.686290] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520ff9b6-6031-fdca-f0f8-f03bca9adc88" [ 779.686290] env[65758]: _type = "Task" [ 779.686290] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.691244] env[65758]: DEBUG nova.compute.manager [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Instance disappeared during snapshot {{(pid=65758) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4635}} [ 779.711379] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520ff9b6-6031-fdca-f0f8-f03bca9adc88, 'name': SearchDatastore_Task, 'duration_secs': 0.011551} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.717143] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.717429] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a/105c53ce-e657-4a29-bc7f-96b4f885707a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.717976] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ccb6d31-e11e-4d38-84e0-09d0abc6fb0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.722572] env[65758]: DEBUG nova.compute.manager [None req-8aa98619-8790-463f-80b6-871450441822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image not found during clean up 332f38a3-c164-47f7-a225-3e0f0c16aabe {{(pid=65758) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4641}} [ 779.727493] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 779.727493] env[65758]: value = "task-4660384" [ 779.727493] env[65758]: _type = "Task" [ 779.727493] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.746469] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660384, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.803381] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d09c2a-b4a6-4cea-96d8-ad67ac115e29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.813383] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6404cc-bc87-4ae8-8a43-b958662f1ee8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.848511] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11979dfc-a5c8-490e-af7f-e4b5fe142589 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.858223] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40def58-d3bb-4937-925f-6443fff7c0dd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.878959] env[65758]: DEBUG nova.compute.provider_tree [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.889287] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660383, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104664} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.890928] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 779.890928] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 779.890928] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 779.962334] env[65758]: WARNING neutronclient.v2_0.client [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 779.963170] env[65758]: WARNING openstack [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 779.963578] env[65758]: WARNING openstack [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 779.988784] env[65758]: DEBUG nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 780.020449] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 780.020756] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 780.020960] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 780.021192] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 780.021400] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 
tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 780.021570] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 780.021819] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 780.022110] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 780.022890] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 780.022890] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 780.022890] env[65758]: DEBUG nova.virt.hardware [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 780.023814] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df1d453-212c-41b3-bca4-b61d4db8847d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.035428] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa434f8-8150-4a32-84e5-dd3e90fcd925 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.062261] env[65758]: DEBUG nova.network.neutron [-] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 780.141023] env[65758]: DEBUG nova.compute.manager [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Start destroying 
the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 780.141023] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 780.141023] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0d027e-023d-491b-86ad-5b484cb885c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.151207] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 780.151576] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ab64470-2e8e-4f32-97e1-63e7c86ee500 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.239856] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660384, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.383904] env[65758]: DEBUG nova.scheduler.client.report [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.513529] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 780.513529] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 780.513529] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleting the datastore file [datastore1] a0a9d947-f2ad-4a35-b336-1486c9a76b06 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 780.513802] env[65758]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cf4b3c6-76b0-4f86-b988-98b147d673e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.521820] env[65758]: DEBUG oslo_vmware.api [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 780.521820] env[65758]: value = "task-4660386" [ 780.521820] env[65758]: _type = "Task" [ 780.521820] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.532615] env[65758]: DEBUG oslo_vmware.api [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.564858] env[65758]: INFO nova.compute.manager [-] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Took 2.03 seconds to deallocate network for instance. [ 780.741069] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660384, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636625} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.741344] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a/105c53ce-e657-4a29-bc7f-96b4f885707a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 780.741344] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 780.741621] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b3811a1-af46-4843-8c9b-27fb35c22c23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.751068] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 780.751068] env[65758]: value = "task-4660387" [ 780.751068] env[65758]: _type = "Task" [ 780.751068] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.763755] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660387, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.849260] env[65758]: WARNING neutronclient.v2_0.client [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 780.849993] env[65758]: WARNING openstack [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 780.850400] env[65758]: WARNING openstack [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 780.897168] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.962s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.898989] env[65758]: DEBUG nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 780.905021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.594s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.905021] env[65758]: DEBUG nova.objects.instance [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lazy-loading 'resources' on Instance uuid adc1b956-1b5a-4272-b0ff-95a565e9c45c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.931270] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 780.933206] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 780.933206] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 780.933206] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 780.933206] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 780.933206] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 780.933206] env[65758]: DEBUG 
nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 780.933206] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 780.933206] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 780.933666] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 780.933666] env[65758]: DEBUG nova.virt.hardware [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 780.934378] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68b7ea8-250a-4a73-8a58-beb3988afd02 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.948015] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8fe99b-9d9f-484d-904c-6c52db05acc7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.964198] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 780.969850] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 780.970180] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 780.970414] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4a323c3-0d4a-4602-8547-4654fd8ca688 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.987923] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 780.987923] env[65758]: value = "task-4660388" [ 780.987923] env[65758]: _type = "Task" [ 780.987923] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.000243] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660388, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.035721] env[65758]: DEBUG oslo_vmware.api [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173456} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.036063] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.036265] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 781.038096] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 781.038096] env[65758]: INFO nova.compute.manager [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Took 0.90 seconds to destroy the instance on the hypervisor. [ 781.038096] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 781.038096] env[65758]: DEBUG nova.compute.manager [-] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 781.038096] env[65758]: DEBUG nova.network.neutron [-] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 781.038096] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 781.038096] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 781.038530] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 781.057389] env[65758]: DEBUG nova.network.neutron [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", "ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 781.072033] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.175777] env[65758]: WARNING neutronclient.v2_0.client 
[req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 781.175777] env[65758]: WARNING openstack [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 781.175777] env[65758]: WARNING openstack [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 781.245301] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 781.261684] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660387, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095771} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.262592] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.262922] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2973aedf-b3f2-4925-a04d-59fb1788ff43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.289240] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a/105c53ce-e657-4a29-bc7f-96b4f885707a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.289240] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b569476c-ca4c-4cf0-a7bd-70eaa4ca60ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.310696] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 781.310696] env[65758]: value = "task-4660389" [ 781.310696] env[65758]: _type = "Task" [ 781.310696] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.320174] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.341741] env[65758]: DEBUG nova.network.neutron [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Successfully updated port: 6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 781.406185] env[65758]: DEBUG nova.compute.utils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.416834] env[65758]: DEBUG nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 781.417091] env[65758]: DEBUG nova.network.neutron [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 781.417476] env[65758]: WARNING neutronclient.v2_0.client [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 781.417781] env[65758]: WARNING neutronclient.v2_0.client [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 781.418371] env[65758]: WARNING openstack [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 781.420240] env[65758]: WARNING openstack [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 781.453670] env[65758]: DEBUG nova.network.neutron [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updated VIF entry in instance network info cache for port ea073371-1ad8-47ae-9cca-67a419a8e219. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 781.454028] env[65758]: DEBUG nova.network.neutron [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance_info_cache with network_info: [{"id": "ea073371-1ad8-47ae-9cca-67a419a8e219", "address": "fa:16:3e:e4:10:d3", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea073371-1a", "ovs_interfaceid": "ea073371-1ad8-47ae-9cca-67a419a8e219", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 781.509112] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660388, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.520267] env[65758]: DEBUG nova.policy [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b76c609db8940e3bdcda32d55fa93a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '237226a477354874a363a8670187a1a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.560553] env[65758]: DEBUG oslo_concurrency.lockutils [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.826120] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660389, 'name': ReconfigVM_Task, 'duration_secs': 0.356009} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.826469] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a/105c53ce-e657-4a29-bc7f-96b4f885707a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.827148] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b9a8eda-4fb2-4b63-ae9e-2c406b4da33e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.835964] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 781.835964] env[65758]: value = "task-4660390" [ 781.835964] env[65758]: _type = "Task" [ 781.835964] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.845064] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquiring lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.845366] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquired lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.847605] env[65758]: DEBUG nova.network.neutron [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 781.851232] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660390, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.917855] env[65758]: DEBUG nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 781.957766] env[65758]: DEBUG oslo_concurrency.lockutils [req-4efb9f03-77ee-4592-af7f-f9234428a180 req-7fcd759d-7f9f-4dee-ad8e-2eb7a5f34edf service nova] Releasing lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.990587] env[65758]: DEBUG nova.network.neutron [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Successfully created port: 6cc91558-00db-46cf-a8a1-93f06ecf3e20 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 782.009523] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660388, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.158258] env[65758]: DEBUG nova.network.neutron [-] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 782.167115] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276dd590-3ecd-474d-baca-ad6527de4988 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.177784] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dcf40f-de58-45cb-abf6-b12b9b9706d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.217603] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71621998-c8d6-43ba-aec1-d949e2325acc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.227567] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd2061d-d79b-4785-92c8-aa0325b7965b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.246880] env[65758]: DEBUG nova.compute.provider_tree [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.350776] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660390, 'name': Rename_Task, 'duration_secs': 0.162833} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.353232] env[65758]: WARNING openstack [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 782.357022] env[65758]: WARNING openstack [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 782.362748] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.363719] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02f47fa0-41bc-4303-b9d3-927ada52751c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.366888] env[65758]: DEBUG nova.network.neutron [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Successfully created port: 99223b4e-c230-4330-8b02-e0b49b37f50f {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 782.378237] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 782.378237] env[65758]: value = "task-4660391" [ 782.378237] env[65758]: _type = "Task" [ 782.378237] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.393430] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.466715] env[65758]: DEBUG nova.network.neutron [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 782.506554] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660388, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.572925] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.576099] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a1a771d-993d-477e-9936-99abeecdcda2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.584027] env[65758]: DEBUG oslo_vmware.api [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 782.584027] env[65758]: value = "task-4660392" [ 782.584027] env[65758]: _type = "Task" [ 782.584027] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.594684] env[65758]: DEBUG oslo_vmware.api [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660392, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.661646] env[65758]: INFO nova.compute.manager [-] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Took 1.62 seconds to deallocate network for instance. [ 782.752850] env[65758]: DEBUG nova.scheduler.client.report [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.890925] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660391, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.933800] env[65758]: DEBUG nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 782.966432] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 782.966749] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.966913] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 782.967116] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.967297] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 782.967493] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 782.967707] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 782.967860] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 782.968026] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 
tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 782.968187] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 782.968353] env[65758]: DEBUG nova.virt.hardware [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 782.969301] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e956b6e3-9864-4635-b735-02aa1f1cc510 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.980118] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e20df02-73de-4643-a52b-87d65fdbdac7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.008215] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660388, 'name': CreateVM_Task, 'duration_secs': 1.561892} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.008475] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 783.008998] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.009164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.009501] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 783.009821] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ab7288b-b537-4a13-a2c0-b9b03b3afea0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.016361] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 
tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 783.016361] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e8ce55-ec5d-87e8-1d43-64642f963ec6" [ 783.016361] env[65758]: _type = "Task" [ 783.016361] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.027613] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e8ce55-ec5d-87e8-1d43-64642f963ec6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.093994] env[65758]: DEBUG oslo_vmware.api [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660392, 'name': PowerOnVM_Task, 'duration_secs': 0.398412} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.094387] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.094630] env[65758]: DEBUG nova.compute.manager [None req-09cd75a9-310b-4e93-aaed-0979ba63bfdf tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 783.095698] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f57be86-fe9e-4da3-94f1-d0d5f1a20516 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.170671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.257649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.355s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.261775] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.640s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.263102] env[65758]: INFO nova.compute.claims [None req-7624ec81-48a8-4207-b4f6-1341b016409d 
tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.306845] env[65758]: INFO nova.scheduler.client.report [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Deleted allocations for instance adc1b956-1b5a-4272-b0ff-95a565e9c45c [ 783.388412] env[65758]: DEBUG oslo_vmware.api [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660391, 'name': PowerOnVM_Task, 'duration_secs': 0.654503} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.388714] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.389098] env[65758]: INFO nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Took 8.47 seconds to spawn the instance on the hypervisor. [ 783.389327] env[65758]: DEBUG nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 783.390613] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad5c2a0-c55b-479c-82ae-849269558939 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.532365] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e8ce55-ec5d-87e8-1d43-64642f963ec6, 'name': SearchDatastore_Task, 'duration_secs': 0.013312} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.532837] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.535477] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.535477] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.535477] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.535805] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.536185] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d1c8d47-281c-4cca-a14f-20e8c9cc43a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.553840] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.554362] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 783.555664] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5964d4a7-9ed2-48d3-9a2f-a39c7fd7454e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.565487] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 783.565487] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5218eea6-49a1-7694-9968-ee2fbd4c2c43" [ 783.565487] env[65758]: _type = "Task" [ 783.565487] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.575165] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5218eea6-49a1-7694-9968-ee2fbd4c2c43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.820310] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9e228353-79ac-4ca6-8f32-11ef23dba1bc tempest-AttachInterfacesUnderV243Test-504714783 tempest-AttachInterfacesUnderV243Test-504714783-project-member] Lock "adc1b956-1b5a-4272-b0ff-95a565e9c45c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.110s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.893297] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "96103549-80a5-462d-9f73-f5f6363ab9fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.893643] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "96103549-80a5-462d-9f73-f5f6363ab9fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.912674] env[65758]: INFO nova.compute.manager [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Took 50.60 seconds to build instance. 
[ 784.068278] env[65758]: DEBUG nova.network.neutron [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Successfully updated port: 6cc91558-00db-46cf-a8a1-93f06ecf3e20 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 784.080571] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5218eea6-49a1-7694-9968-ee2fbd4c2c43, 'name': SearchDatastore_Task, 'duration_secs': 0.023409} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.081765] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5f989de-fca3-42fb-8ea7-f863237815d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.094394] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 784.094394] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52dd25ed-a924-bf83-187a-2fd4552cdca8" [ 784.094394] env[65758]: _type = "Task" [ 784.094394] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.106624] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dd25ed-a924-bf83-187a-2fd4552cdca8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.146597] env[65758]: DEBUG nova.compute.manager [req-7ea6778e-3eee-428c-9a41-e9131c406610 req-193b5c3f-00a8-4796-b896-3ea4462740df service nova] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Received event network-vif-deleted-12074e98-5413-4e8e-bedf-73bb6ccc2248 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 784.165830] env[65758]: WARNING neutronclient.v2_0.client [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 784.165933] env[65758]: WARNING openstack [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 784.166561] env[65758]: WARNING openstack [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 784.275770] env[65758]: DEBUG nova.network.neutron [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Updating instance_info_cache with network_info: [{"id": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "address": "fa:16:3e:d6:80:6a", "network": {"id": "d7cf91d0-3744-4e97-b6c8-5d962fe28826", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-207695677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d90d1b4e23241798a6e7e14aa2ebc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6691e2d5-9b", "ovs_interfaceid": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 784.393563] env[65758]: DEBUG nova.compute.manager [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Received event network-changed-1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 784.393758] env[65758]: DEBUG nova.compute.manager [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Refreshing instance network info cache due to event network-changed-1b1c3792-b109-4ead-81ff-2d275ce2dbc7. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 784.393974] env[65758]: DEBUG oslo_concurrency.lockutils [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] Acquiring lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.394144] env[65758]: DEBUG oslo_concurrency.lockutils [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] Acquired lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.395305] env[65758]: DEBUG nova.network.neutron [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Refreshing network info cache for port 1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 784.416164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d1cd5d1-f3c5-4e92-8d33-27fb894bc781 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.245s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.612134] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dd25ed-a924-bf83-187a-2fd4552cdca8, 'name': SearchDatastore_Task, 'duration_secs': 0.017199} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.614114] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.614552] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 5e54e7f4-3df1-4283-bee1-a7e475051a24/5e54e7f4-3df1-4283-bee1-a7e475051a24.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 784.615236] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6c400c2-5cb6-47ae-995c-57fb1dbe04ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.625697] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 784.625697] env[65758]: value = "task-4660393" [ 784.625697] env[65758]: _type = "Task" [ 784.625697] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.639820] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660393, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.781809] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Releasing lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.781954] env[65758]: DEBUG nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Instance network_info: |[{"id": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "address": "fa:16:3e:d6:80:6a", "network": {"id": "d7cf91d0-3744-4e97-b6c8-5d962fe28826", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-207695677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d90d1b4e23241798a6e7e14aa2ebc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6691e2d5-9b", "ovs_interfaceid": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 784.782465] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:80:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6691e2d5-9b50-4c74-a64b-2c6f98ae2a44', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.794344] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Creating folder: Project (6d90d1b4e23241798a6e7e14aa2ebc3d). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 784.797847] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4a2f637-d101-4389-ad62-02be30271b29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.820411] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Created folder: Project (6d90d1b4e23241798a6e7e14aa2ebc3d) in parent group-v909763. [ 784.820545] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Creating folder: Instances. Parent ref: group-v909903. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 784.820788] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c708807-73e9-4426-9701-59218c21d0be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.842913] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Created folder: Instances in parent group-v909903. [ 784.842913] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 784.842913] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 784.842913] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b6dbe44-0cf6-4c3d-8cf2-6967816aeebd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.867783] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.867783] env[65758]: value = "task-4660396" [ 784.867783] env[65758]: _type = "Task" [ 784.867783] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.886354] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660396, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.902020] env[65758]: WARNING neutronclient.v2_0.client [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 784.902503] env[65758]: WARNING openstack [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 784.903066] env[65758]: WARNING openstack [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 784.920044] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 785.020449] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd9d0c9-904b-4f6e-be12-52554fdf9206 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.035336] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064f7b64-f994-465d-8b93-06fead639742 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.081238] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9686413d-321d-4068-9682-64d69a70b77b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.099241] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb0c3ac-b208-4fc9-a5a3-3ad2effd0420 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.119106] env[65758]: DEBUG nova.compute.provider_tree [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.141406] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660393, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.303719] env[65758]: WARNING neutronclient.v2_0.client [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 785.304514] env[65758]: WARNING openstack [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 785.304891] env[65758]: WARNING openstack [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 785.379604] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660396, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.448388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.463397] env[65758]: DEBUG nova.network.neutron [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Updated VIF entry in instance network info cache for port 1b1c3792-b109-4ead-81ff-2d275ce2dbc7. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 785.464521] env[65758]: DEBUG nova.network.neutron [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Updating instance_info_cache with network_info: [{"id": "1b1c3792-b109-4ead-81ff-2d275ce2dbc7", "address": "fa:16:3e:83:4f:26", "network": {"id": "f0e3ba82-1389-4626-ac32-5f202383ce8a", "bridge": "br-int", "label": "tempest-ServersTestJSON-50832280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "296e50c9805843949e592a0ab985d3a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1c3792-b1", "ovs_interfaceid": "1b1c3792-b109-4ead-81ff-2d275ce2dbc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 785.625234] env[65758]: DEBUG nova.scheduler.client.report [None req-7624ec81-48a8-4207-b4f6-1341b016409d 
tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 785.642751] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660393, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608239} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.642751] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 5e54e7f4-3df1-4283-bee1-a7e475051a24/5e54e7f4-3df1-4283-bee1-a7e475051a24.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 785.643105] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 785.643211] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f66f50fb-2deb-4e52-8440-8858166b6b50 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.653022] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 785.653022] env[65758]: value = "task-4660397" [ 785.653022] env[65758]: _type = "Task" [ 785.653022] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.664433] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660397, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.855024] env[65758]: DEBUG oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc2f93-7d8e-dca4-44d6-c605f50ed68f/disk-0.vmdk. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 785.855999] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627d4a3e-8cb3-448e-9276-dcabad4213fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.863976] env[65758]: DEBUG oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc2f93-7d8e-dca4-44d6-c605f50ed68f/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 785.864224] env[65758]: ERROR oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc2f93-7d8e-dca4-44d6-c605f50ed68f/disk-0.vmdk due to incomplete transfer. [ 785.864490] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-321b6892-69e9-4006-94c1-0498ec56dc5c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.873913] env[65758]: DEBUG oslo_vmware.rw_handles [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc2f93-7d8e-dca4-44d6-c605f50ed68f/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 785.874166] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Uploaded image 7cbb6a07-ab2a-4f4a-8545-e782d07c7e15 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 785.875900] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 785.876594] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ff08d551-80fe-4175-b15f-47095518b0b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.881900] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660396, 'name': CreateVM_Task, 'duration_secs': 0.608615} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.882087] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.882581] env[65758]: WARNING neutronclient.v2_0.client [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 785.882935] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.883106] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.883475] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 785.883680] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-177918a8-a135-4928-aef3-8b302920ba0e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.886590] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 785.886590] env[65758]: value = "task-4660398" [ 785.886590] env[65758]: _type = "Task" [ 785.886590] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.891733] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 785.891733] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524a507f-ce3e-b2b1-1229-9d2a9258bed9" [ 785.891733] env[65758]: _type = "Task" [ 785.891733] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.899433] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660398, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.905886] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a507f-ce3e-b2b1-1229-9d2a9258bed9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.968725] env[65758]: DEBUG oslo_concurrency.lockutils [req-e55398ac-040c-46e3-8eed-af2e28c9e07f req-c05ecca1-9a70-4311-af20-6851b8c74e9c service nova] Releasing lock "refresh_cache-47bb5b02-4f84-468e-ad46-2c1c96b65c97" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.137297] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.876s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.137828] env[65758]: DEBUG nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 786.140571] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.658s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.140754] env[65758]: DEBUG nova.objects.instance [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 786.169714] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660397, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095406} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.170900] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 786.171055] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bf7429-ab8c-402a-a5f3-3624e6f285b4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.193667] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 5e54e7f4-3df1-4283-bee1-a7e475051a24/5e54e7f4-3df1-4283-bee1-a7e475051a24.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 786.195559] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0507ae0d-0104-469f-a833-016d71f0e60a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.222569] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 786.222569] env[65758]: value = "task-4660399" [ 786.222569] env[65758]: _type = "Task" [ 786.222569] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.239322] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660399, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.323823] env[65758]: DEBUG nova.network.neutron [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Successfully updated port: 99223b4e-c230-4330-8b02-e0b49b37f50f {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 786.400011] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660398, 'name': Destroy_Task, 'duration_secs': 0.390715} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.400749] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Destroyed the VM [ 786.401054] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 786.401562] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-de0ea74c-7593-4874-ad23-bc74fa6cfcec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.408303] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a507f-ce3e-b2b1-1229-9d2a9258bed9, 'name': SearchDatastore_Task, 'duration_secs': 0.019126} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.409315] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.409589] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.409854] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.410000] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.410194] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 
tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.410502] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac32b81e-3186-45db-87d2-87bf23737966 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.419491] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 786.419491] env[65758]: value = "task-4660400" [ 786.419491] env[65758]: _type = "Task" [ 786.419491] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.423182] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.424094] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 786.428164] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a322a900-7cc5-4d16-9c14-6a638be86a93 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.435974] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660400, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.440132] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 786.440132] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a0a442-c2cb-0396-a6ec-18091cd223b4" [ 786.440132] env[65758]: _type = "Task" [ 786.440132] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.452255] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a0a442-c2cb-0396-a6ec-18091cd223b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.646133] env[65758]: DEBUG nova.compute.utils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 786.652208] env[65758]: DEBUG nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 786.655282] env[65758]: DEBUG nova.network.neutron [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 786.655282] env[65758]: WARNING neutronclient.v2_0.client [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 786.655282] env[65758]: WARNING neutronclient.v2_0.client [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 786.655282] env[65758]: WARNING openstack [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 786.655282] env[65758]: WARNING openstack [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 786.736304] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660399, 'name': ReconfigVM_Task, 'duration_secs': 0.308929} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.736304] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 5e54e7f4-3df1-4283-bee1-a7e475051a24/5e54e7f4-3df1-4283-bee1-a7e475051a24.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.736780] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc40cb99-3f90-486c-a067-7ab6399fecf3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.746030] env[65758]: DEBUG nova.policy [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b6e413458a84a9b8f2b6dcd0061fc33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd550f85853f447bb91a89b6bc6c5720', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.749676] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 786.749676] env[65758]: value = "task-4660401" [ 786.749676] env[65758]: _type = "Task" [ 786.749676] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.760222] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660401, 'name': Rename_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.827182] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.828037] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquired lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.828037] env[65758]: DEBUG nova.network.neutron [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 786.904273] env[65758]: DEBUG nova.compute.manager [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Received event network-vif-plugged-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.905506] env[65758]: DEBUG oslo_concurrency.lockutils [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Acquiring lock "56ff4122-a999-4caf-b805-0754a66d6bc7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.906905] env[65758]: DEBUG oslo_concurrency.lockutils [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Lock "56ff4122-a999-4caf-b805-0754a66d6bc7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.908191] env[65758]: DEBUG oslo_concurrency.lockutils [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Lock "56ff4122-a999-4caf-b805-0754a66d6bc7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.908889] env[65758]: DEBUG nova.compute.manager [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] No waiting events found dispatching network-vif-plugged-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 786.909156] env[65758]: WARNING nova.compute.manager [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Received unexpected event network-vif-plugged-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 for instance with vm_state building and task_state spawning. 
[ 786.909678] env[65758]: DEBUG nova.compute.manager [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Received event network-changed-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.910081] env[65758]: DEBUG nova.compute.manager [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Refreshing instance network info cache due to event network-changed-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 786.910165] env[65758]: DEBUG oslo_concurrency.lockutils [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Acquiring lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.910354] env[65758]: DEBUG oslo_concurrency.lockutils [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Acquired lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.911328] env[65758]: DEBUG nova.network.neutron [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Refreshing network info cache for port 6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 786.930539] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660400, 'name': RemoveSnapshot_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.955967] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a0a442-c2cb-0396-a6ec-18091cd223b4, 'name': SearchDatastore_Task, 'duration_secs': 0.011048} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.957255] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39fdd2af-7041-4f75-9466-1223d201ca29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.964239] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 786.964239] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52eb939b-eae1-31e7-9a82-b3667a9d2e4f" [ 786.964239] env[65758]: _type = "Task" [ 786.964239] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.976009] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52eb939b-eae1-31e7-9a82-b3667a9d2e4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.996821] env[65758]: DEBUG nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Received event network-vif-plugged-6cc91558-00db-46cf-a8a1-93f06ecf3e20 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.996897] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Acquiring lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.997263] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.997309] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.997457] env[65758]: DEBUG nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] No waiting events found dispatching network-vif-plugged-6cc91558-00db-46cf-a8a1-93f06ecf3e20 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 786.997616] env[65758]: WARNING nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Received unexpected event network-vif-plugged-6cc91558-00db-46cf-a8a1-93f06ecf3e20 for instance with vm_state building and task_state spawning. [ 786.997767] env[65758]: DEBUG nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Received event network-changed-6cc91558-00db-46cf-a8a1-93f06ecf3e20 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 786.997913] env[65758]: DEBUG nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Refreshing instance network info cache due to event network-changed-6cc91558-00db-46cf-a8a1-93f06ecf3e20. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 786.998085] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Acquiring lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.128950] env[65758]: DEBUG nova.network.neutron [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Successfully created port: e31ffc86-5e08-405f-8129-6af1973003bf {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 787.170155] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2d770dd0-1492-47ba-875f-5eb337d7ac4d tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.027s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.170155] env[65758]: DEBUG nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 787.174508] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.251s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.176300] env[65758]: INFO nova.compute.claims [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.260421] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660401, 'name': Rename_Task, 'duration_secs': 0.324812} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.261091] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 787.261091] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7643b0d4-b42d-4bff-a6cb-145b3396fa6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.270050] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 787.270050] env[65758]: value = "task-4660402" [ 787.270050] env[65758]: _type = "Task" [ 787.270050] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.279077] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660402, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.332700] env[65758]: WARNING openstack [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 787.332700] env[65758]: WARNING openstack [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 787.415201] env[65758]: WARNING neutronclient.v2_0.client [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 787.415799] env[65758]: WARNING openstack [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 787.416475] env[65758]: WARNING openstack [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 787.437638] env[65758]: DEBUG oslo_vmware.api [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660400, 'name': RemoveSnapshot_Task, 'duration_secs': 0.97687} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.437638] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 787.437638] env[65758]: INFO nova.compute.manager [None req-9120aacd-0ac0-44d5-93bb-a35e125f5ab4 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Took 17.44 seconds to snapshot the instance on the hypervisor. [ 787.478339] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52eb939b-eae1-31e7-9a82-b3667a9d2e4f, 'name': SearchDatastore_Task, 'duration_secs': 0.013009} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.478689] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.479526] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 56ff4122-a999-4caf-b805-0754a66d6bc7/56ff4122-a999-4caf-b805-0754a66d6bc7.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.480271] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-831d3e2d-eb9f-48a8-85e2-bc23211a1243 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.489921] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 787.489921] env[65758]: value = "task-4660403" [ 787.489921] env[65758]: _type = "Task" [ 787.489921] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.500712] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.781824] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660402, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.001968] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660403, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.185134] env[65758]: DEBUG nova.network.neutron [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 788.192744] env[65758]: DEBUG nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 788.223148] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 788.224068] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 788.224068] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 788.224201] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 788.224576] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 788.224824] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 788.225178] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 788.225495] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 788.225711] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 788.226493] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 788.226808] env[65758]: DEBUG nova.virt.hardware [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 788.228602] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56be8e6-d883-4683-b3b8-e71aa091c810 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.245542] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8676c0-c471-4a36-b601-5d9157beb4a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.287878] env[65758]: DEBUG oslo_vmware.api [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660402, 'name': PowerOnVM_Task, 'duration_secs': 0.87362} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.291960] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 788.292338] env[65758]: DEBUG nova.compute.manager [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 788.293880] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5179ce1a-3d95-4ccb-b69c-58627671d2cf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.510841] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514689} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.511204] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 56ff4122-a999-4caf-b805-0754a66d6bc7/56ff4122-a999-4caf-b805-0754a66d6bc7.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.511432] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.511703] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76885e93-fb66-4a0f-9070-db39efa1894c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.521274] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 788.521274] env[65758]: value = "task-4660404" [ 788.521274] env[65758]: _type = "Task" [ 788.521274] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.530558] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660404, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.773383] env[65758]: DEBUG nova.network.neutron [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Successfully updated port: e31ffc86-5e08-405f-8129-6af1973003bf {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 788.822037] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.909615] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515be62c-8c85-43b7-b884-534ed15a991d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.919543] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38e0e57-fff5-4047-a653-d9f17c2f32f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.955567] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0630e351-38c4-48aa-8537-fb9f6e8d60e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.962838] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.963115] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.969102] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f225537d-98b5-4330-95c6-9d7e98409751 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.988519] env[65758]: DEBUG nova.compute.provider_tree [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.032489] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660404, 
'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070966} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.033639] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 789.034640] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb788c65-87ad-4c94-95e7-f1f335296594 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.039761] env[65758]: WARNING neutronclient.v2_0.client [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 789.040095] env[65758]: WARNING openstack [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 789.040408] env[65758]: WARNING openstack [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 789.077470] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 56ff4122-a999-4caf-b805-0754a66d6bc7/56ff4122-a999-4caf-b805-0754a66d6bc7.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.081091] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e02e0326-3946-4035-892a-c4ed4fd329d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.103538] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 789.103538] env[65758]: value = "task-4660405" [ 789.103538] env[65758]: _type = "Task" [ 789.103538] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.112318] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.182655] env[65758]: WARNING neutronclient.v2_0.client [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 789.183103] env[65758]: WARNING openstack [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 789.183495] env[65758]: WARNING openstack [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 789.284664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.285572] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.285572] env[65758]: DEBUG nova.network.neutron [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 789.362580] env[65758]: DEBUG nova.network.neutron [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Updated VIF entry in instance network info cache for port 6691e2d5-9b50-4c74-a64b-2c6f98ae2a44. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 789.363030] env[65758]: DEBUG nova.network.neutron [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Updating instance_info_cache with network_info: [{"id": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "address": "fa:16:3e:d6:80:6a", "network": {"id": "d7cf91d0-3744-4e97-b6c8-5d962fe28826", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-207695677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d90d1b4e23241798a6e7e14aa2ebc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6691e2d5-9b", "ovs_interfaceid": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 789.417013] env[65758]: WARNING neutronclient.v2_0.client [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 789.417370] env[65758]: WARNING openstack [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 789.417879] env[65758]: WARNING openstack [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 789.498194] env[65758]: DEBUG nova.scheduler.client.report [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 789.562821] env[65758]: DEBUG nova.network.neutron [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Updating instance_info_cache with network_info: [{"id": "6cc91558-00db-46cf-a8a1-93f06ecf3e20", "address": "fa:16:3e:d7:f7:43", "network": {"id": "d0f625ce-05fb-4722-9b7f-d2cba3ce9644", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1503373697", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc91558-00", "ovs_interfaceid": "6cc91558-00db-46cf-a8a1-93f06ecf3e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "99223b4e-c230-4330-8b02-e0b49b37f50f", "address": "fa:16:3e:b9:d4:89", "network": {"id": "b17b1505-4aba-4bde-a1cf-e0fbf23efe0b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-302128356", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.146", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fdd0624-2edb-4733-8284-225815c07f73", "external-id": "nsx-vlan-transportzone-330", "segmentation_id": 330, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99223b4e-c2", "ovs_interfaceid": "99223b4e-c230-4330-8b02-e0b49b37f50f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 789.617409] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660405, 'name': ReconfigVM_Task, 'duration_secs': 0.324325} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.617409] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 56ff4122-a999-4caf-b805-0754a66d6bc7/56ff4122-a999-4caf-b805-0754a66d6bc7.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.617409] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f979f7a-d5ba-4c17-8b0e-6135590742d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.629639] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 789.629639] env[65758]: value = "task-4660406" [ 789.629639] env[65758]: _type = "Task" [ 789.629639] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.640643] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660406, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.788364] env[65758]: WARNING openstack [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 789.788804] env[65758]: WARNING openstack [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 789.837395] env[65758]: DEBUG nova.network.neutron [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 789.866169] env[65758]: DEBUG oslo_concurrency.lockutils [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] Releasing lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.866459] env[65758]: DEBUG nova.compute.manager [req-d1c68b08-6dca-44b3-ab47-3249d04a8ab9 req-08f99d05-a1ca-495b-9fe8-103ae0d3f493 service nova] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Received event network-vif-deleted-12b480c3-4c9e-4da0-9f51-8b29cd9f54ce {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 789.982063] env[65758]: DEBUG nova.compute.manager [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 790.008058] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.831s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.008058] env[65758]: DEBUG nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 790.011713] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.690s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.012398] env[65758]: DEBUG nova.objects.instance [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lazy-loading 'resources' on Instance uuid 148eddf4-4c01-47bc-be81-451ca57e7347 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 790.068022] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Releasing lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.068022] env[65758]: DEBUG nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Instance network_info: |[{"id": "6cc91558-00db-46cf-a8a1-93f06ecf3e20", "address": "fa:16:3e:d7:f7:43", "network": {"id": "d0f625ce-05fb-4722-9b7f-d2cba3ce9644", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1503373697", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc91558-00", "ovs_interfaceid": "6cc91558-00db-46cf-a8a1-93f06ecf3e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "99223b4e-c230-4330-8b02-e0b49b37f50f", "address": "fa:16:3e:b9:d4:89", "network": {"id": "b17b1505-4aba-4bde-a1cf-e0fbf23efe0b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-302128356", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fdd0624-2edb-4733-8284-225815c07f73", "external-id": "nsx-vlan-transportzone-330", 
"segmentation_id": 330, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99223b4e-c2", "ovs_interfaceid": "99223b4e-c230-4330-8b02-e0b49b37f50f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 790.068022] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Acquired lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.068022] env[65758]: DEBUG nova.network.neutron [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Refreshing network info cache for port 6cc91558-00db-46cf-a8a1-93f06ecf3e20 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 790.068022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:f7:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6cc91558-00db-46cf-a8a1-93f06ecf3e20', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:d4:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fdd0624-2edb-4733-8284-225815c07f73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99223b4e-c230-4330-8b02-e0b49b37f50f', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.081832] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 790.083992] env[65758]: WARNING neutronclient.v2_0.client [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 790.084881] env[65758]: WARNING openstack [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 790.085429] env[65758]: WARNING openstack [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 790.092375] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.093367] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04321a3d-27a3-40a2-aec0-d6dac36afa21 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.118358] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.118358] env[65758]: value = "task-4660407" [ 790.118358] env[65758]: _type = "Task" [ 790.118358] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.135143] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660407, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.144457] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660406, 'name': Rename_Task, 'duration_secs': 0.171853} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.144947] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.147156] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8652af3-8ee9-4468-98f1-bcdc8a880f6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.154463] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 790.154463] env[65758]: value = "task-4660408" [ 790.154463] env[65758]: _type = "Task" [ 790.154463] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.166800] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660408, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.483996] env[65758]: WARNING neutronclient.v2_0.client [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 790.484818] env[65758]: WARNING openstack [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 790.485189] env[65758]: WARNING openstack [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 790.512467] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.516186] env[65758]: DEBUG nova.compute.utils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 790.520937] env[65758]: DEBUG nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 790.521167] env[65758]: DEBUG nova.network.neutron [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 790.521482] env[65758]: WARNING neutronclient.v2_0.client [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 790.521768] env[65758]: WARNING neutronclient.v2_0.client [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 790.522413] env[65758]: WARNING openstack [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 790.522773] env[65758]: WARNING openstack [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 790.617975] env[65758]: DEBUG nova.policy [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c394719abc07477f8c6d03c3c6ee02f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6192e0af007d495c85f98e1a72ab56eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 790.638389] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660407, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.673521] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660408, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.679103] env[65758]: DEBUG nova.compute.manager [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Received event network-vif-plugged-e31ffc86-5e08-405f-8129-6af1973003bf {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 790.679302] env[65758]: DEBUG oslo_concurrency.lockutils [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Acquiring lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.679431] env[65758]: DEBUG oslo_concurrency.lockutils [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.679598] env[65758]: DEBUG oslo_concurrency.lockutils [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.679883] env[65758]: DEBUG nova.compute.manager [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] No waiting events found dispatching network-vif-plugged-e31ffc86-5e08-405f-8129-6af1973003bf {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 790.680129] env[65758]: WARNING nova.compute.manager [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Received unexpected event network-vif-plugged-e31ffc86-5e08-405f-8129-6af1973003bf for instance with vm_state building and task_state spawning. [ 790.680505] env[65758]: DEBUG nova.compute.manager [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Received event network-changed-e31ffc86-5e08-405f-8129-6af1973003bf {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 790.680505] env[65758]: DEBUG nova.compute.manager [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Refreshing instance network info cache due to event network-changed-e31ffc86-5e08-405f-8129-6af1973003bf. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 790.680505] env[65758]: DEBUG oslo_concurrency.lockutils [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Acquiring lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.703754] env[65758]: DEBUG nova.network.neutron [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance_info_cache with network_info: [{"id": "e31ffc86-5e08-405f-8129-6af1973003bf", "address": "fa:16:3e:88:0c:68", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31ffc86-5e", "ovs_interfaceid": "e31ffc86-5e08-405f-8129-6af1973003bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 790.796387] env[65758]: WARNING neutronclient.v2_0.client [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 790.797243] env[65758]: WARNING openstack [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 790.797574] env[65758]: WARNING openstack [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 791.028896] env[65758]: DEBUG nova.network.neutron [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Updated VIF entry in instance network info cache for port 6cc91558-00db-46cf-a8a1-93f06ecf3e20. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 791.029364] env[65758]: DEBUG nova.network.neutron [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Updating instance_info_cache with network_info: [{"id": "6cc91558-00db-46cf-a8a1-93f06ecf3e20", "address": "fa:16:3e:d7:f7:43", "network": {"id": "d0f625ce-05fb-4722-9b7f-d2cba3ce9644", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1503373697", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc91558-00", "ovs_interfaceid": "6cc91558-00db-46cf-a8a1-93f06ecf3e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "99223b4e-c230-4330-8b02-e0b49b37f50f", "address": "fa:16:3e:b9:d4:89", "network": {"id": "b17b1505-4aba-4bde-a1cf-e0fbf23efe0b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-302128356", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fdd0624-2edb-4733-8284-225815c07f73", "external-id": "nsx-vlan-transportzone-330", "segmentation_id": 330, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99223b4e-c2", "ovs_interfaceid": "99223b4e-c230-4330-8b02-e0b49b37f50f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 791.030988] env[65758]: DEBUG nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 791.137235] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660407, 'name': CreateVM_Task, 'duration_secs': 0.749809} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.137577] env[65758]: DEBUG nova.network.neutron [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Successfully created port: 98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 791.141703] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.145151] env[65758]: WARNING neutronclient.v2_0.client [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 791.145439] env[65758]: WARNING neutronclient.v2_0.client [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 791.145649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.145791] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.146122] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.146613] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a08136cc-66fb-4463-827a-d841a0a2d100 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.153034] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 791.153034] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b1d990-da0d-edb3-aa77-094bdbe9140d" [ 791.153034] env[65758]: _type = "Task" [ 791.153034] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.167026] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b1d990-da0d-edb3-aa77-094bdbe9140d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.174289] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660408, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.206273] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.207465] env[65758]: DEBUG nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Instance network_info: |[{"id": "e31ffc86-5e08-405f-8129-6af1973003bf", "address": "fa:16:3e:88:0c:68", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31ffc86-5e", "ovs_interfaceid": "e31ffc86-5e08-405f-8129-6af1973003bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 791.207465] env[65758]: DEBUG oslo_concurrency.lockutils [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Acquired lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.207465] env[65758]: DEBUG nova.network.neutron [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Refreshing network info cache for port e31ffc86-5e08-405f-8129-6af1973003bf {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 
791.208473] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:0c:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e31ffc86-5e08-405f-8129-6af1973003bf', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 791.219843] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 791.226267] env[65758]: WARNING neutronclient.v2_0.client [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 791.226267] env[65758]: WARNING openstack [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 791.226267] env[65758]: WARNING openstack [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 791.233306] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 791.234129] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6fd17773-285a-4be7-8961-ebf8b18368d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.265125] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 791.265125] env[65758]: value = "task-4660409" [ 791.265125] env[65758]: _type = "Task" [ 791.265125] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.276099] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660409, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.310132] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0eb386-7db7-4355-980b-5b89ff93438a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.322843] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335cb5d0-b7af-41be-8f8c-0f03eccf49ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.363327] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8370dccc-add3-4604-a8ff-b792d142ab45 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.372042] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e825083a-67ed-45f9-b628-b9893e047792 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.388529] env[65758]: DEBUG nova.compute.provider_tree [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.535156] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Releasing lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.535505] env[65758]: DEBUG nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Received event network-vif-plugged-99223b4e-c230-4330-8b02-e0b49b37f50f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 791.535716] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Acquiring lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.535936] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.536132] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.536297] env[65758]: DEBUG nova.compute.manager 
[req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] No waiting events found dispatching network-vif-plugged-99223b4e-c230-4330-8b02-e0b49b37f50f {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 791.536497] env[65758]: WARNING nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Received unexpected event network-vif-plugged-99223b4e-c230-4330-8b02-e0b49b37f50f for instance with vm_state building and task_state spawning. [ 791.536674] env[65758]: DEBUG nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Received event network-changed-99223b4e-c230-4330-8b02-e0b49b37f50f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 791.536828] env[65758]: DEBUG nova.compute.manager [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Refreshing instance network info cache due to event network-changed-99223b4e-c230-4330-8b02-e0b49b37f50f. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 791.537083] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Acquiring lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.537229] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Acquired lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.537420] env[65758]: DEBUG nova.network.neutron [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Refreshing network info cache for port 99223b4e-c230-4330-8b02-e0b49b37f50f {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 791.622791] env[65758]: WARNING neutronclient.v2_0.client [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 791.623531] env[65758]: WARNING openstack [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 791.624044] env[65758]: WARNING openstack [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 791.667507] env[65758]: DEBUG oslo_vmware.api [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660408, 'name': PowerOnVM_Task, 'duration_secs': 1.066133} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.674832] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.675061] env[65758]: INFO nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Took 11.69 seconds to spawn the instance on the hypervisor. [ 791.675239] env[65758]: DEBUG nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 791.675541] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b1d990-da0d-edb3-aa77-094bdbe9140d, 'name': SearchDatastore_Task, 'duration_secs': 0.027638} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.676351] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1823a9-b0bf-48b2-b5eb-bcdb8f940cf3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.687419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.687419] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.687419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.687419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.687419] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.687419] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4f5a22c-8e7f-4e89-82e2-ac8858d69de0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.700510] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.700510] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.700852] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb518861-fa38-48f8-a039-600929bf01fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.712463] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 791.712463] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc14ff-00e0-f6a6-6548-4b567a9d411c" [ 791.712463] env[65758]: _type = "Task" [ 791.712463] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.733948] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc14ff-00e0-f6a6-6548-4b567a9d411c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.777431] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660409, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.796371] env[65758]: DEBUG nova.network.neutron [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updated VIF entry in instance network info cache for port e31ffc86-5e08-405f-8129-6af1973003bf. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 791.796766] env[65758]: DEBUG nova.network.neutron [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance_info_cache with network_info: [{"id": "e31ffc86-5e08-405f-8129-6af1973003bf", "address": "fa:16:3e:88:0c:68", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31ffc86-5e", "ovs_interfaceid": "e31ffc86-5e08-405f-8129-6af1973003bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 791.892107] env[65758]: DEBUG nova.scheduler.client.report [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.046676] env[65758]: WARNING neutronclient.v2_0.client [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 792.047383] env[65758]: WARNING openstack [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 792.047830] env[65758]: WARNING openstack [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 792.056603] env[65758]: DEBUG nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 792.080340] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 792.080694] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.080865] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 792.081057] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.081201] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 792.081337] env[65758]: DEBUG nova.virt.hardware 
[None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 792.081535] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.081685] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 792.081843] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 792.081996] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 792.082267] env[65758]: DEBUG nova.virt.hardware [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 792.083174] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e2bbe9-371a-4348-93c7-4083982410d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.094496] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6f9bd7-06d4-4bab-ad73-cd62891074a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.210034] env[65758]: INFO nova.compute.manager [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Took 55.45 seconds to build instance. 
[ 792.211855] env[65758]: DEBUG nova.compute.manager [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 792.213477] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c460eb-d4ea-4572-9af4-855d3d5b2bab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.228845] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc14ff-00e0-f6a6-6548-4b567a9d411c, 'name': SearchDatastore_Task, 'duration_secs': 0.014029} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.234267] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-750a55eb-3667-4df1-9924-fb179d007099 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.245144] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 792.245144] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b751e5-ee9d-04e3-1191-f8669fa83686" [ 792.245144] env[65758]: _type = "Task" [ 792.245144] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.256734] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b751e5-ee9d-04e3-1191-f8669fa83686, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.277670] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660409, 'name': CreateVM_Task, 'duration_secs': 0.690141} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.278535] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 792.278535] env[65758]: WARNING neutronclient.v2_0.client [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 792.278741] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.278887] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.279220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 792.279486] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-023b301b-2732-43bf-af35-dc3e216e4dab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.285987] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 792.285987] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a5d3aa-ce2b-cf2e-fbca-0eeff7ae6f9c" [ 792.285987] env[65758]: _type = "Task" [ 792.285987] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.298561] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a5d3aa-ce2b-cf2e-fbca-0eeff7ae6f9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.300449] env[65758]: DEBUG oslo_concurrency.lockutils [req-cab2d196-45b2-413c-9379-19c302e1f4be req-4e44bf0c-a1ae-4063-b8bd-da137fdd45b8 service nova] Releasing lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.324499] env[65758]: WARNING neutronclient.v2_0.client [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 792.325481] env[65758]: WARNING openstack [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 792.325834] env[65758]: WARNING openstack [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 792.399748] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.388s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.403811] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.960s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.404081] env[65758]: DEBUG nova.objects.instance [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lazy-loading 'resources' on Instance uuid 03073968-e679-4ce5-9f84-c4765217b308 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 792.425634] env[65758]: DEBUG nova.network.neutron [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Updated VIF entry in instance network info cache for port 99223b4e-c230-4330-8b02-e0b49b37f50f. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 792.426152] env[65758]: DEBUG nova.network.neutron [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Updating instance_info_cache with network_info: [{"id": "6cc91558-00db-46cf-a8a1-93f06ecf3e20", "address": "fa:16:3e:d7:f7:43", "network": {"id": "d0f625ce-05fb-4722-9b7f-d2cba3ce9644", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1503373697", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc91558-00", "ovs_interfaceid": "6cc91558-00db-46cf-a8a1-93f06ecf3e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "99223b4e-c230-4330-8b02-e0b49b37f50f", "address": "fa:16:3e:b9:d4:89", "network": {"id": "b17b1505-4aba-4bde-a1cf-e0fbf23efe0b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-302128356", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "237226a477354874a363a8670187a1a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fdd0624-2edb-4733-8284-225815c07f73", "external-id": "nsx-vlan-transportzone-330", "segmentation_id": 330, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99223b4e-c2", "ovs_interfaceid": "99223b4e-c230-4330-8b02-e0b49b37f50f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 792.444370] env[65758]: INFO nova.scheduler.client.report [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleted allocations for instance 148eddf4-4c01-47bc-be81-451ca57e7347 [ 792.625665] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "5e54e7f4-3df1-4283-bee1-a7e475051a24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.626026] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "5e54e7f4-3df1-4283-bee1-a7e475051a24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.626294] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "5e54e7f4-3df1-4283-bee1-a7e475051a24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.626912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "5e54e7f4-3df1-4283-bee1-a7e475051a24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.626912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "5e54e7f4-3df1-4283-bee1-a7e475051a24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.629063] env[65758]: INFO nova.compute.manager [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Terminating instance [ 792.714735] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bef51da3-11e9-4bca-8696-040b84865519 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "56ff4122-a999-4caf-b805-0754a66d6bc7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.106s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.739644] env[65758]: INFO nova.compute.manager [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] instance snapshotting [ 792.742558] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c849ab5-6666-484b-987b-6d8050d2bc28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.756611] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b751e5-ee9d-04e3-1191-f8669fa83686, 'name': SearchDatastore_Task, 'duration_secs': 0.033472} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.771010] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.774815] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 81f961c3-ec8f-4281-be18-5d605fa73ecc/81f961c3-ec8f-4281-be18-5d605fa73ecc.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.775707] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7cf16c57-2843-482b-8a15-a4038f61ccdd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.779167] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4de6dbe-e505-4b7e-bf50-fe786e3ce622 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.793792] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 792.793792] env[65758]: value = "task-4660410" [ 792.793792] env[65758]: _type = "Task" [ 792.793792] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.803575] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a5d3aa-ce2b-cf2e-fbca-0eeff7ae6f9c, 'name': SearchDatastore_Task, 'duration_secs': 0.023099} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.804367] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.804612] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 792.804870] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.805033] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.805642] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.805642] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91f25991-572c-4d18-8bd2-6fd4bcf7d8bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.812941] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660410, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.831444] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.831669] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 792.832543] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2f6fac0-5072-4dc4-8fe0-7e57b5e56bbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.841342] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 792.841342] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8b23f-120c-5bc0-9dec-c8e721926ead" [ 792.841342] env[65758]: _type = "Task" [ 792.841342] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.845935] env[65758]: DEBUG nova.network.neutron [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Successfully updated port: 98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 792.855498] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8b23f-120c-5bc0-9dec-c8e721926ead, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.930405] env[65758]: DEBUG oslo_concurrency.lockutils [req-3b452ecb-e02a-4ffe-97a6-1dc77ffc0bca req-ac7a06c8-1075-4207-b8d2-505c4408d5f5 service nova] Releasing lock "refresh_cache-81f961c3-ec8f-4281-be18-5d605fa73ecc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.958654] env[65758]: DEBUG oslo_concurrency.lockutils [None req-10e3f1d3-0663-47b3-8202-898158757939 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "148eddf4-4c01-47bc-be81-451ca57e7347" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.485s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.015580] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.015815] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.132754] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "refresh_cache-5e54e7f4-3df1-4283-bee1-a7e475051a24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.132945] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquired lock "refresh_cache-5e54e7f4-3df1-4283-bee1-a7e475051a24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.133160] env[65758]: DEBUG nova.network.neutron [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 793.204848] env[65758]: DEBUG nova.compute.manager [req-f3cd748a-2dc1-41d0-9c58-6e97ebb74306 req-ca52a433-a3db-4efb-8aaf-15679152fa76 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Received event network-vif-plugged-98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 793.204848] env[65758]: DEBUG oslo_concurrency.lockutils [req-f3cd748a-2dc1-41d0-9c58-6e97ebb74306 req-ca52a433-a3db-4efb-8aaf-15679152fa76 service nova] Acquiring lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.204848] env[65758]: DEBUG oslo_concurrency.lockutils [req-f3cd748a-2dc1-41d0-9c58-6e97ebb74306 req-ca52a433-a3db-4efb-8aaf-15679152fa76 service nova] Lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.205109] env[65758]: DEBUG oslo_concurrency.lockutils [req-f3cd748a-2dc1-41d0-9c58-6e97ebb74306 req-ca52a433-a3db-4efb-8aaf-15679152fa76 service nova] Lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.207926] env[65758]: DEBUG nova.compute.manager [req-f3cd748a-2dc1-41d0-9c58-6e97ebb74306 req-ca52a433-a3db-4efb-8aaf-15679152fa76 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] No waiting events found dispatching network-vif-plugged-98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 793.208142] env[65758]: WARNING nova.compute.manager [req-f3cd748a-2dc1-41d0-9c58-6e97ebb74306 req-ca52a433-a3db-4efb-8aaf-15679152fa76 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Received unexpected event network-vif-plugged-98886eae-63b7-4cb2-a8dc-f86495b733a9 for instance with vm_state building and task_state spawning. 
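The "Acquiring lock" / "acquired ... waited" / '"released" ... held' DEBUG records above are emitted by oslo_concurrency.lockutils around the guarded block. Below is a minimal illustrative sketch of that pattern, not Nova's source: the lock-name format ("<instance-uuid>-events") is copied from the log, `pop_instance_event` is a hypothetical stub, and oslo.concurrency is assumed to be installed.

```python
# Illustrative only -- not Nova's code. Shows the lockutils pattern that
# produces the Acquiring/acquired/released DEBUG lines in the log above.
from oslo_concurrency import lockutils


def pop_instance_event(instance_uuid: str, event_name: str):
    # Lock name mirrors the log: "<instance-uuid>-events".
    with lockutils.lock(f"{instance_uuid}-events"):
        # In Nova this would pop a waiting event object for the external
        # event; this stub always reports that nothing is waiting.
        return None


if __name__ == "__main__":
    evt = pop_instance_event(
        "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947",
        "network-vif-plugged-98886eae-63b7-4cb2-a8dc-f86495b733a9")
    if evt is None:
        print("No waiting events found; event handled as unexpected")
```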
[ 793.218766] env[65758]: DEBUG nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 793.303820] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 793.307614] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-43942999-076b-4891-846f-60c9e7488f8c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.317302] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660410, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.319600] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 793.319600] env[65758]: value = "task-4660411" [ 793.319600] env[65758]: _type = "Task" [ 793.319600] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.332278] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660411, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.354635] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquiring lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.354817] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquired lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.355011] env[65758]: DEBUG nova.network.neutron [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 793.356592] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8b23f-120c-5bc0-9dec-c8e721926ead, 'name': SearchDatastore_Task, 'duration_secs': 0.016915} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.357849] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1f7a610-a816-45f1-b2fb-0d8a1bf0b065 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.365408] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 793.365408] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527c20f8-0e09-58b2-d798-4023dddcf46f" [ 793.365408] env[65758]: _type = "Task" [ 793.365408] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.381929] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527c20f8-0e09-58b2-d798-4023dddcf46f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.567610] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd454e9-1722-4f78-85df-2cd80d438df8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.577494] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7260b19b-9a47-475e-8b8b-4c70387f4573 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.613932] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84667e27-38e2-41da-9e51-2020dfc62d70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.622592] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ac9398-57b2-4ca3-8dd5-cb6f353c45c8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.637670] env[65758]: WARNING neutronclient.v2_0.client [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 793.638420] env[65758]: WARNING openstack [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 793.638865] env[65758]: WARNING openstack [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 793.646085] env[65758]: DEBUG nova.compute.provider_tree [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.651473] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "454bd092-f683-4a3a-91c9-65191d6996f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.651684] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock 
"454bd092-f683-4a3a-91c9-65191d6996f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.670304] env[65758]: DEBUG nova.network.neutron [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 793.742389] env[65758]: DEBUG nova.network.neutron [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 793.745539] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.808874] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660410, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.799063} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.809244] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 81f961c3-ec8f-4281-be18-5d605fa73ecc/81f961c3-ec8f-4281-be18-5d605fa73ecc.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.809526] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.809844] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb4bf492-6cc6-415c-91b6-307ebf5d97fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.819408] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 793.819408] env[65758]: value = "task-4660412" [ 793.819408] env[65758]: _type = "Task" [ 793.819408] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.838227] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660411, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.838655] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660412, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.861980] env[65758]: WARNING openstack [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 793.862594] env[65758]: WARNING openstack [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 793.882342] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527c20f8-0e09-58b2-d798-4023dddcf46f, 'name': SearchDatastore_Task, 'duration_secs': 0.066674} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.882719] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.883101] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a9ec9a64-94c7-41a5-a7a4-5e034ddfc592/a9ec9a64-94c7-41a5-a7a4-5e034ddfc592.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 793.883468] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d6a4c00-8f7f-444a-ab8b-c86034aa2169 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.893388] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 793.893388] env[65758]: value = "task-4660413" [ 793.893388] env[65758]: _type = "Task" [ 793.893388] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.902891] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660413, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.904637] env[65758]: DEBUG nova.network.neutron [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 794.004674] env[65758]: WARNING neutronclient.v2_0.client [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
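The repeated "Task: {'id': task-..., 'name': CopyVirtualDisk_Task} progress is N%" records come from a poll-until-done loop (oslo_vmware's wait_for_task/_poll_task). The sketch below is a generic, self-contained illustration of that loop, not the oslo.vmware implementation; `poll_fn` is a hypothetical stand-in for querying vCenter task state.

```python
# Generic sketch of the poll loop behind the "... progress is N%" records.
import time


def wait_for_task(poll_fn, interval=0.5):
    """Poll poll_fn() -> (state, progress) until 'success' or 'error'."""
    while True:
        state, progress = poll_fn()
        print(f"progress is {progress}%")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)


if __name__ == "__main__":
    # Fake task that finishes after three polls, mimicking 0% -> 51% -> done.
    steps = iter([("running", 0), ("running", 51), ("success", 100)])
    wait_for_task(lambda: next(steps), interval=0.01)
```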
[ 794.005358] env[65758]: WARNING openstack [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 794.006193] env[65758]: WARNING openstack [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 794.110051] env[65758]: DEBUG nova.network.neutron [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Updating instance_info_cache with network_info: [{"id": "98886eae-63b7-4cb2-a8dc-f86495b733a9", "address": "fa:16:3e:01:17:3c", "network": {"id": "f8fcef34-18bc-4207-82e0-7764f3a21019", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1119232469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6192e0af007d495c85f98e1a72ab56eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98886eae-63", "ovs_interfaceid": "98886eae-63b7-4cb2-a8dc-f86495b733a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 794.153586] env[65758]: DEBUG nova.scheduler.client.report [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.245495] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Releasing lock "refresh_cache-5e54e7f4-3df1-4283-bee1-a7e475051a24" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.246490] env[65758]: DEBUG nova.compute.manager [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 794.246490] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.247557] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb27640-55bc-4c0e-af05-65d668f34038 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.260093] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.260484] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b615533-cfbc-42a9-94e1-7457cb21b92a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.270437] env[65758]: DEBUG oslo_vmware.api [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 794.270437] env[65758]: value = "task-4660414" [ 794.270437] env[65758]: _type = "Task" [ 794.270437] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.282552] env[65758]: DEBUG oslo_vmware.api [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660414, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.334924] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660412, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077914} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.338940] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.339434] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660411, 'name': CreateSnapshot_Task, 'duration_secs': 0.973603} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.340304] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c30731-0587-4694-80fa-87d0fe49a1f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.343538] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 794.344509] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6e7448-f7b2-433b-aa06-b4c499341c17 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.385253] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 81f961c3-ec8f-4281-be18-5d605fa73ecc/81f961c3-ec8f-4281-be18-5d605fa73ecc.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.385973] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76a32ce6-de2a-4c91-ba26-d7c23ded6a5a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.415114] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660413, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.417211] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 794.417211] env[65758]: value = "task-4660415" [ 794.417211] env[65758]: _type = "Task" [ 794.417211] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.428834] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660415, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.509140] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "31816c0c-d7d2-48db-9a87-a1e03c938a60" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.509424] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "31816c0c-d7d2-48db-9a87-a1e03c938a60" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.616024] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Releasing lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.616024] env[65758]: DEBUG nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Instance network_info: |[{"id": "98886eae-63b7-4cb2-a8dc-f86495b733a9", "address": "fa:16:3e:01:17:3c", "network": {"id": "f8fcef34-18bc-4207-82e0-7764f3a21019", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1119232469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6192e0af007d495c85f98e1a72ab56eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98886eae-63", "ovs_interfaceid": "98886eae-63b7-4cb2-a8dc-f86495b733a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 794.616024] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: 
b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:17:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98886eae-63b7-4cb2-a8dc-f86495b733a9', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.622544] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Creating folder: Project (6192e0af007d495c85f98e1a72ab56eb). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 794.622918] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4469a06-7126-4ca7-9708-cb910d460f58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.635020] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Created folder: Project (6192e0af007d495c85f98e1a72ab56eb) in parent group-v909763. [ 794.635232] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Creating folder: Instances. Parent ref: group-v909909. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 794.635492] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b35a00f-c619-4b52-a38e-c13282b7470b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.647565] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Created folder: Instances in parent group-v909909. [ 794.647837] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 794.648043] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.648267] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9455f3d1-c00b-4cc2-acde-a8c390759121 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.663929] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.260s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.666255] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.595s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.666458] env[65758]: DEBUG nova.objects.instance [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 794.678775] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.678775] env[65758]: value = "task-4660418" [ 794.678775] env[65758]: _type = "Task" [ 794.678775] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.688818] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660418, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.726153] env[65758]: INFO nova.scheduler.client.report [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Deleted allocations for instance 03073968-e679-4ce5-9f84-c4765217b308 [ 794.783737] env[65758]: DEBUG oslo_vmware.api [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660414, 'name': PowerOffVM_Task, 'duration_secs': 0.205697} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.784040] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.784215] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.784579] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc4443e7-1a22-475c-acf1-72a5c2bc968c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.829533] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.829906] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.830206] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Deleting the datastore file [datastore1] 5e54e7f4-3df1-4283-bee1-a7e475051a24 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.831086] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61063d3c-2209-4430-8e81-291898452686 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.839153] env[65758]: DEBUG oslo_vmware.api [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for the task: (returnval){ [ 794.839153] env[65758]: value = "task-4660420" [ 794.839153] env[65758]: _type = "Task" [ 794.839153] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.849043] env[65758]: DEBUG oslo_vmware.api [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660420, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.894260] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 794.894713] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-73d88b51-dd29-47b6-84ac-1d67bc11f102 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.909054] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 794.909054] env[65758]: value = "task-4660421" [ 794.909054] env[65758]: _type = "Task" [ 794.909054] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.917164] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660413, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696989} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.917977] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a9ec9a64-94c7-41a5-a7a4-5e034ddfc592/a9ec9a64-94c7-41a5-a7a4-5e034ddfc592.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 794.918129] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 794.918464] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8c1a7d7-8ef1-411d-ac59-5d96a9b379ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.927586] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660421, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.936296] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660415, 'name': ReconfigVM_Task, 'duration_secs': 0.48537} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.937772] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 81f961c3-ec8f-4281-be18-5d605fa73ecc/81f961c3-ec8f-4281-be18-5d605fa73ecc.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.938552] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 794.938552] env[65758]: value = "task-4660422" [ 794.938552] env[65758]: _type = "Task" [ 794.938552] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.938762] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21f04cb1-0c57-49b0-a3e3-633c4cdc4a30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.949707] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 794.949707] env[65758]: value = "task-4660423" [ 794.949707] env[65758]: _type = "Task" [ 794.949707] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.953762] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660422, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.965013] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660423, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.188492] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660418, 'name': CreateVM_Task, 'duration_secs': 0.48474} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.188492] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.188492] env[65758]: WARNING neutronclient.v2_0.client [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
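The copy/extend/reconfigure records above keep referring to two datastore paths: the shared image-cache VMDK "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk" and the per-instance VMDK "[datastore1] <uuid>/<uuid>.vmdk". The helpers below are purely hypothetical (not a Nova API); they only reproduce the path layout visible in the log.

```python
# Hypothetical helpers that rebuild the datastore path patterns seen above.
def cache_vmdk_path(datastore: str, image_id: str,
                    cache_dir: str = "devstack-image-cache_base") -> str:
    return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"


def instance_vmdk_path(datastore: str, instance_uuid: str) -> str:
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


if __name__ == "__main__":
    src = cache_vmdk_path("datastore1", "75a6399b-5100-4c51-b5cf-162bd505a28f")
    dst = instance_vmdk_path("datastore1", "81f961c3-ec8f-4281-be18-5d605fa73ecc")
    print(f"copy {src} -> {dst}")
```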
[ 795.188880] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.189090] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.189453] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.189649] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c642597-7326-4409-a31f-455113c3352f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.197170] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 795.197170] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529a9265-a9d3-d0b9-4254-8c01c11adad6" [ 795.197170] env[65758]: _type = "Task" [ 795.197170] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.208789] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529a9265-a9d3-d0b9-4254-8c01c11adad6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.236616] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7b75dc80-3593-4793-873b-c136cb591639 tempest-ListServersNegativeTestJSON-308270416 tempest-ListServersNegativeTestJSON-308270416-project-member] Lock "03073968-e679-4ce5-9f84-c4765217b308" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.293s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.356927] env[65758]: DEBUG oslo_vmware.api [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Task: {'id': task-4660420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27112} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.357336] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.357717] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.357967] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.358188] env[65758]: INFO nova.compute.manager [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Took 1.11 seconds to destroy the instance on the hypervisor. [ 795.358453] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 795.358665] env[65758]: DEBUG nova.compute.manager [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 795.358767] env[65758]: DEBUG nova.network.neutron [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 795.359038] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 795.359749] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 795.361572] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 795.383839] env[65758]: DEBUG nova.network.neutron [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 795.384150] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 795.424284] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660421, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.451947] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660422, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091135} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.452604] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 795.453798] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a07bdc-1589-4b73-9116-4a4c2a3781bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.482058] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] a9ec9a64-94c7-41a5-a7a4-5e034ddfc592/a9ec9a64-94c7-41a5-a7a4-5e034ddfc592.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 795.484569] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdb4f4f8-adb2-4bff-866a-776e29beb1c8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.501771] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660423, 'name': Rename_Task, 'duration_secs': 0.399261} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.502135] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.502885] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f31f127-ba6f-47b5-948a-25ce456caddd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.509469] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 795.509469] env[65758]: value = "task-4660424" [ 795.509469] env[65758]: _type = "Task" [ 795.509469] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.517168] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 795.517168] env[65758]: value = "task-4660425" [ 795.517168] env[65758]: _type = "Task" [ 795.517168] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.526594] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660424, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.534162] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660425, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.542342] env[65758]: DEBUG nova.compute.manager [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Received event network-changed-98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 795.542342] env[65758]: DEBUG nova.compute.manager [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Refreshing instance network info cache due to event network-changed-98886eae-63b7-4cb2-a8dc-f86495b733a9. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 795.542342] env[65758]: DEBUG oslo_concurrency.lockutils [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Acquiring lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.542663] env[65758]: DEBUG oslo_concurrency.lockutils [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Acquired lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.542767] env[65758]: DEBUG nova.network.neutron [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Refreshing network info cache for port 98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 795.677827] env[65758]: DEBUG oslo_concurrency.lockutils [None req-60e6b720-b2a6-4929-920c-8a652354b718 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.679223] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 37.389s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.679516] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.679725] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 795.680190] env[65758]: DEBUG oslo_concurrency.lockutils [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.532s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.680464] env[65758]: DEBUG nova.objects.instance [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lazy-loading 'resources' on Instance uuid 9118ff13-e2cf-404c-ae4d-2b9dbc52738d {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 795.684711] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22669c24-51f1-4bd5-918b-e781bb73e437 {{(pid=65758) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.697385] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a09fae6-93ca-488e-9aa9-5b1d73cb1f21 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.725293] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956ec548-feca-4384-8172-c3750ea18ae8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.728940] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529a9265-a9d3-d0b9-4254-8c01c11adad6, 'name': SearchDatastore_Task, 'duration_secs': 0.023463} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.729371] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.729656] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.729980] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.730172] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.730438] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.731450] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc141745-242d-4f97-833a-8ac22f21c5b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.737829] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6063c2a9-d648-4c08-be27-799386f9a4c7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.746080] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.746365] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.775967] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eeaeaf5a-e79c-4b0e-b1e2-a4e3e6da4de0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.781048] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178288MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 795.781217] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.786889] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 795.786889] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527eb408-a117-cad1-8e99-78c66b2eb103" [ 795.786889] env[65758]: _type = "Task" [ 795.786889] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.798067] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527eb408-a117-cad1-8e99-78c66b2eb103, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.887997] env[65758]: DEBUG nova.network.neutron [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 795.921272] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660421, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.022747] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660424, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.031314] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660425, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.045285] env[65758]: WARNING neutronclient.v2_0.client [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 796.046238] env[65758]: WARNING openstack [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 796.048274] env[65758]: WARNING openstack [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 796.304244] env[65758]: WARNING neutronclient.v2_0.client [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 796.305095] env[65758]: WARNING openstack [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 796.305572] env[65758]: WARNING openstack [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 796.316818] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527eb408-a117-cad1-8e99-78c66b2eb103, 'name': SearchDatastore_Task, 'duration_secs': 0.047643} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.318496] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf7ca6b7-4137-4f4d-a969-63837db9aad4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.325633] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 796.325633] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c5dadd-9833-5d9e-e9b5-0cd9f9b45e6d" [ 796.325633] env[65758]: _type = "Task" [ 796.325633] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.340294] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c5dadd-9833-5d9e-e9b5-0cd9f9b45e6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.393471] env[65758]: INFO nova.compute.manager [-] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Took 1.03 seconds to deallocate network for instance. [ 796.421998] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660421, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.461505] env[65758]: DEBUG nova.network.neutron [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Updated VIF entry in instance network info cache for port 98886eae-63b7-4cb2-a8dc-f86495b733a9. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 796.461860] env[65758]: DEBUG nova.network.neutron [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Updating instance_info_cache with network_info: [{"id": "98886eae-63b7-4cb2-a8dc-f86495b733a9", "address": "fa:16:3e:01:17:3c", "network": {"id": "f8fcef34-18bc-4207-82e0-7764f3a21019", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1119232469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6192e0af007d495c85f98e1a72ab56eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98886eae-63", "ovs_interfaceid": "98886eae-63b7-4cb2-a8dc-f86495b733a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 796.530908] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660424, 'name': ReconfigVM_Task, 'duration_secs': 0.844277} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.539164] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Reconfigured VM instance instance-00000033 to attach disk [datastore1] a9ec9a64-94c7-41a5-a7a4-5e034ddfc592/a9ec9a64-94c7-41a5-a7a4-5e034ddfc592.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 796.540355] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5ab72cb-b1e2-412c-a8e3-6c2c8830d90d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.549439] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660425, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.551768] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 796.551768] env[65758]: value = "task-4660426" [ 796.551768] env[65758]: _type = "Task" [ 796.551768] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.564133] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660426, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.838373] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c5dadd-9833-5d9e-e9b5-0cd9f9b45e6d, 'name': SearchDatastore_Task, 'duration_secs': 0.01338} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.838660] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.838953] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] b7e2a3d9-7db3-40b3-98a5-c6e6e040a947/b7e2a3d9-7db3-40b3-98a5-c6e6e040a947.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.839379] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-996e0fac-fe6f-4ef3-8545-b28fabef8d00 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.852958] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 796.852958] env[65758]: value = "task-4660427" [ 796.852958] env[65758]: _type = "Task" [ 796.852958] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.864571] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660427, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.902267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.923660] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660421, 'name': CloneVM_Task, 'duration_secs': 1.914802} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.924017] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Created linked-clone VM from snapshot [ 796.924976] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6726ed74-4877-4db0-b110-ccfdf3565120 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.936812] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Uploading image da0c0205-17bb-450e-844c-4ca10de1c712 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 796.964292] env[65758]: DEBUG oslo_concurrency.lockutils [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Releasing lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.964628] env[65758]: DEBUG nova.compute.manager [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Received event network-changed-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 796.964898] env[65758]: DEBUG nova.compute.manager [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Refreshing instance network info cache due to event network-changed-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 796.965288] env[65758]: DEBUG oslo_concurrency.lockutils [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Acquiring lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.965376] env[65758]: DEBUG oslo_concurrency.lockutils [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Acquired lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.965469] env[65758]: DEBUG nova.network.neutron [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Refreshing network info cache for port 6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 797.000415] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f84de9-eb56-42cd-b217-bc34c045470c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.006375] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4105fa-ffe4-49aa-89dd-8de5209ce43a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.046552] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d711f5d6-02ac-4667-8962-45f3e955f773 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.050949] env[65758]: DEBUG oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 797.050949] env[65758]: value = "vm-909912" [ 797.050949] env[65758]: _type = "VirtualMachine" [ 797.050949] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 797.051242] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-151052db-7e83-46b4-b755-b73fe732384a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.063427] env[65758]: DEBUG oslo_vmware.api [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660425, 'name': PowerOnVM_Task, 'duration_secs': 1.289618} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.065239] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.066185] env[65758]: INFO nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Took 14.13 seconds to spawn the instance on the hypervisor. [ 797.066185] env[65758]: DEBUG nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 797.067040] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fba41c-0d3a-4674-9532-cc9ff1dd322e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.071199] env[65758]: DEBUG oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lease: (returnval){ [ 797.071199] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fb3856-89fa-5c40-f235-1fb05e15a787" [ 797.071199] env[65758]: _type = "HttpNfcLease" [ 797.071199] env[65758]: } obtained for exporting VM: (result){ [ 797.071199] env[65758]: value = "vm-909912" [ 797.071199] env[65758]: _type = "VirtualMachine" [ 797.071199] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 797.071528] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the lease: (returnval){ [ 797.071528] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fb3856-89fa-5c40-f235-1fb05e15a787" [ 797.071528] env[65758]: _type = "HttpNfcLease" [ 797.071528] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 797.075594] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2627c7-42ee-4d7a-985a-04e1ba08d6f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.078651] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660426, 'name': Rename_Task, 'duration_secs': 0.199561} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.082973] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 797.093193] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f37d3376-21f8-4af7-9cbc-12df0b54d9e8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.095273] env[65758]: DEBUG nova.compute.provider_tree [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.103597] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 797.103597] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fb3856-89fa-5c40-f235-1fb05e15a787" [ 797.103597] env[65758]: _type = "HttpNfcLease" [ 797.103597] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 797.104396] env[65758]: DEBUG oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 797.104396] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fb3856-89fa-5c40-f235-1fb05e15a787" [ 797.104396] env[65758]: _type = "HttpNfcLease" [ 797.104396] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 797.105180] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68ba5ff-6014-469d-8271-0eabb946a855 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.109244] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 797.109244] env[65758]: value = "task-4660429" [ 797.109244] env[65758]: _type = "Task" [ 797.109244] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.119103] env[65758]: DEBUG oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240a1f6-3b9e-1e32-076f-93a7ee39bdbd/disk-0.vmdk from lease info. 
{{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 797.120529] env[65758]: DEBUG oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240a1f6-3b9e-1e32-076f-93a7ee39bdbd/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 797.181560] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660429, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.366391] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660427, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.433340] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4691468b-f260-49df-bfdd-9355fdfd1c8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.468813] env[65758]: WARNING neutronclient.v2_0.client [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 797.469561] env[65758]: WARNING openstack [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 797.469914] env[65758]: WARNING openstack [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 797.603452] env[65758]: DEBUG nova.scheduler.client.report [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.620359] env[65758]: INFO nova.compute.manager [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Took 60.25 seconds to build instance. [ 797.627077] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660429, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.748594] env[65758]: WARNING neutronclient.v2_0.client [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 797.749399] env[65758]: WARNING openstack [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 797.749769] env[65758]: WARNING openstack [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 797.868410] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660427, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766277} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.868792] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] b7e2a3d9-7db3-40b3-98a5-c6e6e040a947/b7e2a3d9-7db3-40b3-98a5-c6e6e040a947.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.869101] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.869292] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de0ca1c9-a785-452f-b59b-00839cf72e3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.878046] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 797.878046] env[65758]: value = "task-4660430" [ 797.878046] env[65758]: _type = "Task" [ 797.878046] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.890340] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660430, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.901412] env[65758]: DEBUG nova.network.neutron [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Updated VIF entry in instance network info cache for port 6691e2d5-9b50-4c74-a64b-2c6f98ae2a44. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 797.902641] env[65758]: DEBUG nova.network.neutron [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Updating instance_info_cache with network_info: [{"id": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "address": "fa:16:3e:d6:80:6a", "network": {"id": "d7cf91d0-3744-4e97-b6c8-5d962fe28826", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-207695677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d90d1b4e23241798a6e7e14aa2ebc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6691e2d5-9b", "ovs_interfaceid": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 798.113443] env[65758]: DEBUG oslo_concurrency.lockutils [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.433s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.116889] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.049s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.117183] env[65758]: DEBUG nova.objects.instance [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'resources' on Instance uuid 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.129224] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5c43463d-27f6-480c-9293-f1787918d768 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.748s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.137738] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 
tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660429, 'name': PowerOnVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.160614] env[65758]: INFO nova.scheduler.client.report [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleted allocations for instance 9118ff13-e2cf-404c-ae4d-2b9dbc52738d [ 798.392196] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085722} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.393047] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 798.394825] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75f3a86-1964-4cf3-bd57-63e0fa737e05 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.421233] env[65758]: DEBUG oslo_concurrency.lockutils [req-dde02b62-08b6-473e-9ecd-e490d4cbe147 req-51700f39-2d49-4f43-9d57-64e459f4c24b service nova] Releasing lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.431117] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] b7e2a3d9-7db3-40b3-98a5-c6e6e040a947/b7e2a3d9-7db3-40b3-98a5-c6e6e040a947.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.432324] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2db17fe-492c-40bc-a1ed-f7ff5a9eb88e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.455893] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 798.455893] env[65758]: value = "task-4660431" [ 798.455893] env[65758]: _type = "Task" [ 798.455893] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.466549] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660431, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.632044] env[65758]: DEBUG oslo_vmware.api [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660429, 'name': PowerOnVM_Task, 'duration_secs': 1.338215} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.632870] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 798.633122] env[65758]: INFO nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Took 10.44 seconds to spawn the instance on the hypervisor. [ 798.633360] env[65758]: DEBUG nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 798.633732] env[65758]: DEBUG nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 798.639644] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad48a7e-ba30-47d5-b9fd-35d80ef69c97 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.674426] env[65758]: DEBUG oslo_concurrency.lockutils [None req-689b3fba-d914-4ff3-94e6-392b392d2dc7 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "9118ff13-e2cf-404c-ae4d-2b9dbc52738d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.193s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.971619] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660431, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.167640] env[65758]: INFO nova.compute.manager [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Took 56.58 seconds to build instance. 
[ 799.181178] env[65758]: DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.368711] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36188c8-2098-4c55-b440-b914c1c420d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.382173] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56634662-8460-495c-a8f9-e2d4bfef9e58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.423204] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab660d62-ec97-4068-8095-16451e4d4c4e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.433315] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de520ba-2e00-4674-98b4-877096c03ff7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.452790] env[65758]: DEBUG nova.compute.provider_tree [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.469996] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660431, 'name': ReconfigVM_Task, 'duration_secs': 0.733598} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.470380] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Reconfigured VM instance instance-00000034 to attach disk [datastore1] b7e2a3d9-7db3-40b3-98a5-c6e6e040a947/b7e2a3d9-7db3-40b3-98a5-c6e6e040a947.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 799.471210] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa367aaa-47b8-4eeb-80a2-91e26e3ad3ef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.480627] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 799.480627] env[65758]: value = "task-4660432" [ 799.480627] env[65758]: _type = "Task" [ 799.480627] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.491752] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660432, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.675428] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7624ec81-48a8-4207-b4f6-1341b016409d tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.470s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.899040] env[65758]: DEBUG nova.compute.manager [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Received event network-changed-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 799.899290] env[65758]: DEBUG nova.compute.manager [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Refreshing instance network info cache due to event network-changed-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 799.899509] env[65758]: DEBUG oslo_concurrency.lockutils [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] Acquiring lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.899628] env[65758]: DEBUG oslo_concurrency.lockutils [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] Acquired lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.899809] env[65758]: DEBUG nova.network.neutron [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Refreshing network info cache for port 6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 799.956098] env[65758]: DEBUG nova.scheduler.client.report [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 799.994154] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 
tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660432, 'name': Rename_Task, 'duration_secs': 0.213232} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.994308] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.994613] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87593c38-aa37-4f2f-a7b6-809da646a938 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.004327] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 800.004327] env[65758]: value = "task-4660433" [ 800.004327] env[65758]: _type = "Task" [ 800.004327] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.014471] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660433, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.179031] env[65758]: DEBUG nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 800.403818] env[65758]: WARNING neutronclient.v2_0.client [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
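The Rename_Task and PowerOnVM_Task entries above and below all follow one pattern: a vSphere task is started through the API session, oslo.vmware polls it ("Task: {...} progress is N%") and the caller resumes once it reports "completed successfully". The following is a minimal sketch of that pattern against oslo.vmware's public session API; the vCenter endpoint, credentials and the VM looked up here are placeholders, and Nova's vmwareapi driver wraps these calls in its own vm_util helpers.

# Minimal sketch (placeholder endpoint/credentials) of starting a vCenter
# task and waiting for it with oslo.vmware - the poll loop behind the
# "progress is N%" / "completed successfully" entries above.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',       # placeholders
    api_retry_count=10, task_poll_interval=0.5)

# Fetch up to 100 VirtualMachine managed-object references and take the
# first one (assumes the inventory is not empty).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# PowerOnVM_Task returns a Task reference immediately; wait_for_task polls
# it until it succeeds, or raises if the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)

The task_poll_interval passed to the session is, roughly, the cadence at which the progress entries above are logged.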
[ 800.403818] env[65758]: WARNING openstack [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 800.403818] env[65758]: WARNING openstack [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 800.461576] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.345s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.465316] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.747s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.468359] env[65758]: INFO nova.compute.claims [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.515904] env[65758]: INFO nova.scheduler.client.report [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted allocations for instance 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1 [ 800.525676] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660433, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.717050] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.812439] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "81f961c3-ec8f-4281-be18-5d605fa73ecc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.812703] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.813177] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.813292] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.813472] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.818414] env[65758]: INFO nova.compute.manager [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Terminating instance [ 801.020782] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660433, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.029027] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bb4870e9-6ff1-4fe8-9c8b-8395a2d5d912 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "4fda2aa0-451c-4c0f-a03a-19ea8b083ba1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.760s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.046179] env[65758]: WARNING neutronclient.v2_0.client [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 801.049031] env[65758]: WARNING openstack [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 801.050088] env[65758]: WARNING openstack [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 801.206358] env[65758]: DEBUG nova.network.neutron [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Updated VIF entry in instance network info cache for port 6691e2d5-9b50-4c74-a64b-2c6f98ae2a44. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 801.206712] env[65758]: DEBUG nova.network.neutron [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Updating instance_info_cache with network_info: [{"id": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "address": "fa:16:3e:d6:80:6a", "network": {"id": "d7cf91d0-3744-4e97-b6c8-5d962fe28826", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-207695677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d90d1b4e23241798a6e7e14aa2ebc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6691e2d5-9b", "ovs_interfaceid": "6691e2d5-9b50-4c74-a64b-2c6f98ae2a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 801.328060] env[65758]: DEBUG nova.compute.manager [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 801.328060] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 801.328060] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6924ae70-0df9-4dba-a25d-ea047223caf3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.335014] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 801.335612] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0e4605e-bd76-4823-9174-d7843da1c280 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.345424] env[65758]: DEBUG oslo_vmware.api [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 801.345424] env[65758]: value = "task-4660434" [ 801.345424] env[65758]: _type = "Task" [ 801.345424] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.359198] env[65758]: DEBUG oslo_vmware.api [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660434, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.527906] env[65758]: DEBUG oslo_vmware.api [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660433, 'name': PowerOnVM_Task, 'duration_secs': 1.121128} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.529681] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 801.529681] env[65758]: INFO nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Took 9.47 seconds to spawn the instance on the hypervisor. 
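The teardown of instance 81f961c3-ec8f-4281-be18-5d605fa73ecc, started above with PowerOffVM_Task and continued below with UnregisterVM and FileManager.DeleteDatastoreFile_Task, follows a fixed three-step sequence on the vmwareapi driver. Below is a rough sketch of those three calls through an oslo.vmware session; vm_ref, dc_ref and the datastore path are placeholders, and Nova's vm_util/ds_util helpers add error handling that is omitted here.

# Rough sketch of the destroy sequence logged for instance 81f961c3:
# PowerOffVM_Task -> UnregisterVM -> FileManager.DeleteDatastoreFile_Task.
# 'session' is an oslo.vmware VMwareAPISession; vm_ref, dc_ref and ds_path
# are placeholders for the VM, datacenter and instance directory involved.
def destroy_vm(session, vm_ref, dc_ref,
               ds_path='[datastore1] 81f961c3-ec8f-4281-be18-5d605fa73ecc'):
    # 1. Power the VM off and wait for the task to finish.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. Unregister the VM from vCenter; this call is not a task.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance's directory from the datastore.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)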
[ 801.529681] env[65758]: DEBUG nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 801.529912] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc91b578-c679-4b6e-93e8-3f08be336b12 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.709964] env[65758]: DEBUG oslo_concurrency.lockutils [req-5b799e86-7830-4453-b5b5-08aa4d802cd2 req-e13cb3a8-52f7-4395-b28e-3f4fcd16ad31 service nova] Releasing lock "refresh_cache-56ff4122-a999-4caf-b805-0754a66d6bc7" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.857759] env[65758]: DEBUG oslo_vmware.api [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660434, 'name': PowerOffVM_Task, 'duration_secs': 0.291325} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.861112] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.863477] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.863795] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c768675-4a84-440a-945b-673fac0c5675 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.017740] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 802.017740] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 802.017740] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Deleting the datastore file [datastore1] 81f961c3-ec8f-4281-be18-5d605fa73ecc {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 802.018060] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-5bbd6d6f-12d6-4249-99b6-879e84c44bb7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.032702] env[65758]: DEBUG oslo_vmware.api [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for the task: (returnval){ [ 802.032702] env[65758]: value = "task-4660436" [ 802.032702] env[65758]: _type = "Task" [ 802.032702] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.041864] env[65758]: DEBUG oslo_vmware.api [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660436, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.057829] env[65758]: INFO nova.compute.manager [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Took 52.16 seconds to build instance. [ 802.238632] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21fec609-50cf-44ef-ada0-f28089631b21 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.248958] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8df560d-96a2-42e7-aa55-a7a80c806f66 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.304095] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9089e6-2627-4084-877d-8d3d66a99448 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.312328] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b69dc5-b688-4808-a2c7-4690d77aa0b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.329482] env[65758]: DEBUG nova.compute.provider_tree [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.542813] env[65758]: DEBUG oslo_vmware.api [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660436, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.566498] env[65758]: DEBUG oslo_concurrency.lockutils [None req-58aed33d-7de6-453b-a601-ba50c915aa29 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.102s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.656736] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.657015] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.835636] env[65758]: DEBUG nova.scheduler.client.report [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.046792] env[65758]: DEBUG oslo_vmware.api [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Task: {'id': task-4660436, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.543562} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.046792] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.046792] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.047986] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.048174] env[65758]: INFO nova.compute.manager [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Took 1.72 seconds to destroy the instance on the hypervisor. [ 803.048432] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 803.048642] env[65758]: DEBUG nova.compute.manager [-] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 803.048813] env[65758]: DEBUG nova.network.neutron [-] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 803.048995] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 803.049564] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 803.049843] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 803.069189] env[65758]: DEBUG nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 803.128435] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 803.343805] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.878s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.344760] env[65758]: DEBUG nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 803.349220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.194s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.349351] env[65758]: DEBUG nova.objects.instance [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lazy-loading 'resources' on Instance uuid 549673ec-3d75-4aad-a001-014f3f53a6b0 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.601620] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.668132] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquiring lock "56ff4122-a999-4caf-b805-0754a66d6bc7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.668132] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "56ff4122-a999-4caf-b805-0754a66d6bc7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.668414] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquiring lock 
"56ff4122-a999-4caf-b805-0754a66d6bc7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.668695] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "56ff4122-a999-4caf-b805-0754a66d6bc7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.668873] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "56ff4122-a999-4caf-b805-0754a66d6bc7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.674967] env[65758]: INFO nova.compute.manager [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Terminating instance [ 803.860538] env[65758]: DEBUG nova.compute.utils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 803.866955] env[65758]: DEBUG nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 803.867341] env[65758]: DEBUG nova.network.neutron [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 803.867773] env[65758]: WARNING neutronclient.v2_0.client [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 803.869055] env[65758]: WARNING neutronclient.v2_0.client [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 803.870187] env[65758]: WARNING openstack [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 803.871865] env[65758]: WARNING openstack [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 803.881858] env[65758]: DEBUG nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 804.088345] env[65758]: DEBUG nova.policy [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd63bda0326124f8eb9ee6d515a6a7320', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '111dc87614bb42e2bc66ae1bfb092795', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 804.175337] env[65758]: DEBUG nova.network.neutron [-] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 804.178642] env[65758]: DEBUG nova.compute.manager [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 804.178642] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 804.179218] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eab4010-94b2-490c-9c4c-75a542d49040 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.195556] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.196994] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0332979-9be9-43ea-bec0-e8b012990c2b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.209234] env[65758]: DEBUG oslo_vmware.api [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 804.209234] env[65758]: value = "task-4660437" [ 804.209234] env[65758]: _type = "Task" [ 804.209234] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.223838] env[65758]: DEBUG oslo_vmware.api [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660437, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.535012] env[65758]: DEBUG nova.compute.manager [req-ae7888cf-7adb-4e96-add5-004f958e16aa req-c6d0e8e9-9595-4c0a-a232-ee6daaed372c service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Received event network-vif-deleted-99223b4e-c230-4330-8b02-e0b49b37f50f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 804.633093] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b96c242-a81c-454d-9ea8-f8d066cf1532 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.636721] env[65758]: DEBUG nova.network.neutron [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Successfully created port: ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 804.646970] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da43ebff-bb9a-46b7-929b-bf8641e5a096 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.693240] env[65758]: INFO nova.compute.manager [-] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Took 1.64 seconds to deallocate network for instance. [ 804.696201] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be390cc0-5390-44f5-a229-7785c6515222 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.719228] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7e0f04-2a90-4a43-930b-1ed220ef5543 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.740502] env[65758]: DEBUG nova.compute.provider_tree [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.742050] env[65758]: DEBUG oslo_vmware.api [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660437, 'name': PowerOffVM_Task, 'duration_secs': 0.440277} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.742561] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 804.742728] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 804.743092] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a96ecdab-4fe8-4aab-9b40-4a0b1678e543 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.832512] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 804.832686] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 804.832884] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Deleting the datastore file [datastore2] 56ff4122-a999-4caf-b805-0754a66d6bc7 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 804.833284] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74be65c7-5e48-4db7-9b5e-b12325be2e82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.841212] env[65758]: DEBUG oslo_vmware.api [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for the task: (returnval){ [ 804.841212] env[65758]: value = "task-4660439" [ 804.841212] env[65758]: _type = "Task" [ 804.841212] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.854021] env[65758]: DEBUG oslo_vmware.api [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660439, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.897060] env[65758]: DEBUG nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 804.943252] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 804.943562] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 804.943737] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 804.943958] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 804.944138] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 804.944303] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 804.944541] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 804.944716] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 804.944912] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 804.945506] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 804.945506] env[65758]: DEBUG nova.virt.hardware [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 804.946362] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fac8ff-a7a9-4459-ab43-a3a238e74150 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.955761] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c58dfbf-d14a-4bee-a5c6-d3eb25baeee5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.029683] env[65758]: DEBUG nova.compute.manager [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 805.169911] env[65758]: DEBUG nova.compute.manager [req-bcf1e2be-597d-4ab9-ba99-6bf76981fff4 req-14c8fff4-695e-48f5-9ca7-cbea958e6e0a service nova] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Received event network-vif-deleted-6cc91558-00db-46cf-a8a1-93f06ecf3e20 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 805.209119] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.243966] env[65758]: DEBUG nova.scheduler.client.report [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.355241] env[65758]: DEBUG oslo_vmware.api [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Task: {'id': task-4660439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.441926} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.355590] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 805.355856] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 805.356142] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 805.356381] env[65758]: INFO nova.compute.manager [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Took 1.18 seconds to destroy the instance on the hypervisor. [ 805.356736] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 805.357034] env[65758]: DEBUG nova.compute.manager [-] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 805.357892] env[65758]: DEBUG nova.network.neutron [-] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 805.357892] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 805.358445] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 805.358857] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 805.412872] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 805.553951] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.611974] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquiring lock "79c63944-c4c8-4c7c-bc42-3f958d737e66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.612449] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "79c63944-c4c8-4c7c-bc42-3f958d737e66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.756078] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.407s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.758876] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.983s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.760352] env[65758]: INFO nova.compute.claims [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.790689] env[65758]: INFO nova.scheduler.client.report [None 
req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Deleted allocations for instance 549673ec-3d75-4aad-a001-014f3f53a6b0 [ 806.197073] env[65758]: DEBUG nova.network.neutron [-] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 806.298907] env[65758]: DEBUG nova.network.neutron [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Successfully updated port: ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 806.306529] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20e74943-844e-45ef-94ed-ee0ac8829401 tempest-ServersAdminNegativeTestJSON-152403051 tempest-ServersAdminNegativeTestJSON-152403051-project-member] Lock "549673ec-3d75-4aad-a001-014f3f53a6b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.601s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.699949] env[65758]: INFO nova.compute.manager [-] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Took 1.34 seconds to deallocate network for instance. [ 806.801997] env[65758]: DEBUG nova.compute.manager [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Received event network-changed-98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 806.802535] env[65758]: DEBUG nova.compute.manager [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Refreshing instance network info cache due to event network-changed-98886eae-63b7-4cb2-a8dc-f86495b733a9. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 806.803486] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Acquiring lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.803486] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Acquired lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.803486] env[65758]: DEBUG nova.network.neutron [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Refreshing network info cache for port 98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 806.813789] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.813789] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.813789] env[65758]: DEBUG nova.network.neutron [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 806.966151] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.966441] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.206416] env[65758]: DEBUG nova.compute.manager [req-677f2eee-db6b-4738-b332-af4fb9258f92 req-fc2742f1-9527-4cfb-a707-9957b46322f6 service nova] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Received event network-vif-deleted-6691e2d5-9b50-4c74-a64b-2c6f98ae2a44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 807.215776] 
env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.316382] env[65758]: WARNING openstack [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 807.316848] env[65758]: WARNING openstack [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 807.324664] env[65758]: WARNING neutronclient.v2_0.client [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 807.325294] env[65758]: WARNING openstack [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 807.327722] env[65758]: WARNING openstack [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 807.425607] env[65758]: DEBUG nova.network.neutron [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 807.541472] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66647d9c-ed69-48fe-97fd-22fc209895a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.549810] env[65758]: WARNING neutronclient.v2_0.client [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 807.550909] env[65758]: WARNING openstack [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 807.551458] env[65758]: WARNING openstack [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 807.560987] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0597c8a9-9321-4f5b-bb03-4e954e8066a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.600382] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f094e325-3640-4d05-9d83-27cbcd128918 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.613339] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1323a258-09e9-4535-8127-a41bc624b39c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.630385] env[65758]: DEBUG nova.compute.provider_tree [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.678592] env[65758]: WARNING neutronclient.v2_0.client [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 807.679365] env[65758]: WARNING openstack [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 807.679778] env[65758]: WARNING openstack [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 807.703553] env[65758]: DEBUG nova.network.neutron [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Updating instance_info_cache with network_info: [{"id": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "address": "fa:16:3e:7b:cc:ba", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec4eeaee-4c", "ovs_interfaceid": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 807.826347] env[65758]: DEBUG nova.network.neutron [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Updated VIF entry in instance network info cache for port 98886eae-63b7-4cb2-a8dc-f86495b733a9. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 807.829721] env[65758]: DEBUG nova.network.neutron [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Updating instance_info_cache with network_info: [{"id": "98886eae-63b7-4cb2-a8dc-f86495b733a9", "address": "fa:16:3e:01:17:3c", "network": {"id": "f8fcef34-18bc-4207-82e0-7764f3a21019", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1119232469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6192e0af007d495c85f98e1a72ab56eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98886eae-63", "ovs_interfaceid": "98886eae-63b7-4cb2-a8dc-f86495b733a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 808.137060] env[65758]: DEBUG nova.scheduler.client.report [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.207841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Releasing lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.208554] env[65758]: DEBUG nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Instance network_info: |[{"id": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "address": "fa:16:3e:7b:cc:ba", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec4eeaee-4c", "ovs_interfaceid": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 808.209292] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:cc:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec4eeaee-4c33-4f1c-93a9-038d455eff39', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.216862] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Creating folder: Project (111dc87614bb42e2bc66ae1bfb092795). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.217199] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cceeb75f-375c-4b66-bddf-3eb39fc03af6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.233070] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Created folder: Project (111dc87614bb42e2bc66ae1bfb092795) in parent group-v909763. [ 808.233864] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Creating folder: Instances. Parent ref: group-v909913. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.233864] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d94a054d-036f-4a45-9572-219d3f271177 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.246394] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Created folder: Instances in parent group-v909913. [ 808.246394] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 808.246394] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.246394] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-859bc6c9-e761-4a8d-addc-585e3f533089 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.275109] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.275109] env[65758]: value = "task-4660442" [ 808.275109] env[65758]: _type = "Task" [ 808.275109] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.285075] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660442, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.331691] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Releasing lock "refresh_cache-b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.332059] env[65758]: DEBUG nova.compute.manager [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Received event network-vif-plugged-ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 808.332370] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Acquiring lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.332669] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.332855] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.333028] env[65758]: DEBUG nova.compute.manager [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] No waiting events found dispatching network-vif-plugged-ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 808.333211] env[65758]: WARNING nova.compute.manager [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service 
nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Received unexpected event network-vif-plugged-ec4eeaee-4c33-4f1c-93a9-038d455eff39 for instance with vm_state building and task_state spawning. [ 808.333373] env[65758]: DEBUG nova.compute.manager [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Received event network-changed-ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 808.333517] env[65758]: DEBUG nova.compute.manager [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Refreshing instance network info cache due to event network-changed-ec4eeaee-4c33-4f1c-93a9-038d455eff39. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 808.333701] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Acquiring lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.333849] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Acquired lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.334014] env[65758]: DEBUG nova.network.neutron [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Refreshing network info cache for port ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 808.564648] env[65758]: DEBUG oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240a1f6-3b9e-1e32-076f-93a7ee39bdbd/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 808.566098] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aaf8c73-3341-4bb3-bb5a-b6abdec25573 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.573598] env[65758]: DEBUG oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240a1f6-3b9e-1e32-076f-93a7ee39bdbd/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 808.573766] env[65758]: ERROR oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240a1f6-3b9e-1e32-076f-93a7ee39bdbd/disk-0.vmdk due to incomplete transfer. 
[ 808.574034] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-01cdaed3-15cf-405c-8ceb-8be672af499f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.583344] env[65758]: DEBUG oslo_vmware.rw_handles [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240a1f6-3b9e-1e32-076f-93a7ee39bdbd/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 808.583632] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Uploaded image da0c0205-17bb-450e-844c-4ca10de1c712 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 808.586090] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 808.586396] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f3f2cb13-4986-4236-a293-c4e489272116 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.595895] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 808.595895] env[65758]: value = "task-4660443" [ 808.595895] env[65758]: _type = "Task" [ 808.595895] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.606102] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660443, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.644384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.885s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.644384] env[65758]: DEBUG nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 808.650312] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.889s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.650312] env[65758]: INFO nova.compute.claims [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.788857] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660442, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.838110] env[65758]: WARNING neutronclient.v2_0.client [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 808.838494] env[65758]: WARNING openstack [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 808.838934] env[65758]: WARNING openstack [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.034660] env[65758]: WARNING neutronclient.v2_0.client [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 809.035716] env[65758]: WARNING openstack [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 809.035716] env[65758]: WARNING openstack [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.106605] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660443, 'name': Destroy_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.145012] env[65758]: DEBUG nova.network.neutron [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Updated VIF entry in instance network info cache for port ec4eeaee-4c33-4f1c-93a9-038d455eff39. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 809.145423] env[65758]: DEBUG nova.network.neutron [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Updating instance_info_cache with network_info: [{"id": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "address": "fa:16:3e:7b:cc:ba", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec4eeaee-4c", "ovs_interfaceid": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 809.157756] env[65758]: DEBUG nova.compute.utils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 809.159851] env[65758]: DEBUG nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 
tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 809.160155] env[65758]: DEBUG nova.network.neutron [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 809.160518] env[65758]: WARNING neutronclient.v2_0.client [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 809.160881] env[65758]: WARNING neutronclient.v2_0.client [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 809.161604] env[65758]: WARNING openstack [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 809.162014] env[65758]: WARNING openstack [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 809.208210] env[65758]: DEBUG nova.policy [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffc98a9206034c3e9afb5a1685ff3688', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60dcbdfe17cb46fa8dfc1b7690f28b1f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 809.286328] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660442, 'name': CreateVM_Task, 'duration_secs': 0.614475} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.286540] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.287648] env[65758]: WARNING neutronclient.v2_0.client [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 809.287648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.287648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.287973] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 809.288362] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f88e35b-d588-4bc7-9783-d16c76f7239a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.294558] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 809.294558] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f69257-a4c0-9299-e068-0663ddf324a2" [ 809.294558] env[65758]: _type = "Task" [ 809.294558] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.305511] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f69257-a4c0-9299-e068-0663ddf324a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.539619] env[65758]: DEBUG nova.network.neutron [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Successfully created port: 06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 809.608750] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660443, 'name': Destroy_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.648842] env[65758]: DEBUG oslo_concurrency.lockutils [req-c9d1f2ab-ebea-491d-8eaf-4f310eea0744 req-335bd6cb-3614-4cab-b930-2b22edbb4911 service nova] Releasing lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.670367] env[65758]: DEBUG nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 809.808497] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f69257-a4c0-9299-e068-0663ddf324a2, 'name': SearchDatastore_Task, 'duration_secs': 0.011818} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.808816] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.809106] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.809355] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.809499] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.809672] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.809942] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f2d3724-7b9d-4634-8e1c-6c5f532b04a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.820549] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.820549] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.821218] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-584889e2-84cf-48d1-a5f1-d9c9ced640e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.828556] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 809.828556] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525d9fd9-9f97-92f2-7340-dffe6e679770" [ 809.828556] env[65758]: _type = "Task" [ 809.828556] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.843787] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525d9fd9-9f97-92f2-7340-dffe6e679770, 'name': SearchDatastore_Task, 'duration_secs': 0.010588} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.846967] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9db75091-5651-436a-b54e-cfc7fc946026 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.854747] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 809.854747] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524e8f08-9d90-10df-ab33-1f6d918e7aa0" [ 809.854747] env[65758]: _type = "Task" [ 809.854747] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.866244] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524e8f08-9d90-10df-ab33-1f6d918e7aa0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.110850] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660443, 'name': Destroy_Task, 'duration_secs': 1.419274} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.111151] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Destroyed the VM [ 810.111385] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 810.111643] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c643843a-4eb3-4fcc-92a4-b25e98d8300f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.119689] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 810.119689] env[65758]: value = "task-4660444" [ 810.119689] env[65758]: _type = "Task" [ 810.119689] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.131891] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660444, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.291139] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37784ef5-dd4a-40db-afdb-a44ee1047712 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.300308] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ac8646-56aa-4dd2-a616-9893ad29d039 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.331436] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bc2f86-ff04-42d0-a5d8-c5502b529fb0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.340681] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa586e1-59ea-476d-b69d-52c2d5b69446 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.355241] env[65758]: DEBUG nova.compute.provider_tree [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.365803] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': 
session[52f282ba-8d16-d852-9890-43f0b19795c3]524e8f08-9d90-10df-ab33-1f6d918e7aa0, 'name': SearchDatastore_Task, 'duration_secs': 0.011082} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.366625] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.366870] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720/7c0e6911-4f85-4b47-a7e9-84d0e3bb5720.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.367218] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ae58bce-00bb-47ea-956b-00304575ddec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.377183] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 810.377183] env[65758]: value = "task-4660445" [ 810.377183] env[65758]: _type = "Task" [ 810.377183] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.387133] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660445, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.635000] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660444, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.681457] env[65758]: DEBUG nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 810.721045] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 810.721415] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 810.721534] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 810.721803] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 810.721964] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 810.722122] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 810.722450] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 810.722651] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 810.722917] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 810.723195] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 810.723425] env[65758]: DEBUG nova.virt.hardware [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 810.724550] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcf3bc1-4432-440d-9756-2dbbec2a62cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.741280] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edabf936-4250-45ec-9b91-34d2285ebeaf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.862298] env[65758]: DEBUG nova.scheduler.client.report [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 810.893017] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660445, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.133902] env[65758]: DEBUG oslo_vmware.api [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660444, 'name': RemoveSnapshot_Task, 'duration_secs': 0.672031} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.134286] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 811.134651] env[65758]: INFO nova.compute.manager [None req-c405b3ed-9c21-47ce-a50e-81c4f3ab8512 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Took 18.39 seconds to snapshot the instance on the hypervisor. [ 811.235813] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.236112] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.236320] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.236503] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.236714] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "1e249ca9-a7a8-440f-832b-a8f5d84ada8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.239089] env[65758]: INFO nova.compute.manager [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Terminating instance [ 811.256205] env[65758]: DEBUG nova.network.neutron [None 
req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Successfully updated port: 06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 811.368817] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.721s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.369410] env[65758]: DEBUG nova.compute.manager [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 811.372419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.986s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.372560] env[65758]: DEBUG nova.objects.instance [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lazy-loading 'resources' on Instance uuid fb379346-f17a-4433-bb55-2b72025e9a61 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 811.394673] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512798} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.395251] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720/7c0e6911-4f85-4b47-a7e9-84d0e3bb5720.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.395505] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.395781] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a8725d2-dd4c-4b31-bfc7-d175cef9ecbd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.406157] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 811.406157] env[65758]: value = "task-4660446" [ 811.406157] env[65758]: _type = "Task" [ 811.406157] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.421246] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660446, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.743422] env[65758]: DEBUG nova.compute.manager [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 811.743649] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 811.744685] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a1308f-54e8-4bb9-865d-488d2251b6bc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.753816] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 811.754139] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1492c05e-6169-495d-b2aa-87686836ac54 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.762515] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.762515] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquired lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.762515] env[65758]: DEBUG nova.network.neutron [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 811.762515] env[65758]: DEBUG oslo_vmware.api [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 811.762515] env[65758]: value = "task-4660447" [ 811.762515] env[65758]: _type = "Task" [ 811.762515] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.772694] env[65758]: DEBUG oslo_vmware.api [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660447, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.879659] env[65758]: DEBUG nova.compute.utils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 811.882795] env[65758]: DEBUG nova.compute.manager [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Not allocating networking since 'none' was specified. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 811.921782] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072247} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.922110] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.924539] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdaae7b-6f15-4d53-a575-5f866b0adf50 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.954458] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720/7c0e6911-4f85-4b47-a7e9-84d0e3bb5720.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.957656] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b6b97cf-71d6-48ce-b081-8f4477f1c473 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.981085] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 811.981085] env[65758]: value = "task-4660448" [ 811.981085] env[65758]: _type = "Task" [ 811.981085] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.995616] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660448, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.142240] env[65758]: DEBUG nova.compute.manager [req-f1117b57-5757-4878-ae9c-622cac532c48 req-6a7cc356-1dab-4ce3-b7f8-e2ecaa1179fb service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Received event network-vif-plugged-06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 812.142472] env[65758]: DEBUG oslo_concurrency.lockutils [req-f1117b57-5757-4878-ae9c-622cac532c48 req-6a7cc356-1dab-4ce3-b7f8-e2ecaa1179fb service nova] Acquiring lock "596a5005-3607-44a2-9c0e-f1a56865011c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.142673] env[65758]: DEBUG oslo_concurrency.lockutils [req-f1117b57-5757-4878-ae9c-622cac532c48 req-6a7cc356-1dab-4ce3-b7f8-e2ecaa1179fb service nova] Lock "596a5005-3607-44a2-9c0e-f1a56865011c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.142832] env[65758]: DEBUG oslo_concurrency.lockutils [req-f1117b57-5757-4878-ae9c-622cac532c48 req-6a7cc356-1dab-4ce3-b7f8-e2ecaa1179fb service nova] Lock "596a5005-3607-44a2-9c0e-f1a56865011c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.143043] env[65758]: DEBUG nova.compute.manager [req-f1117b57-5757-4878-ae9c-622cac532c48 req-6a7cc356-1dab-4ce3-b7f8-e2ecaa1179fb service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] No waiting events found dispatching network-vif-plugged-06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 812.144156] env[65758]: WARNING nova.compute.manager [req-f1117b57-5757-4878-ae9c-622cac532c48 req-6a7cc356-1dab-4ce3-b7f8-e2ecaa1179fb service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Received unexpected event network-vif-plugged-06d61806-4596-49d1-968d-8ba5ad0b379d for instance with vm_state building and task_state spawning. 
[ 812.265818] env[65758]: WARNING openstack [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 812.266959] env[65758]: WARNING openstack [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 812.293654] env[65758]: DEBUG oslo_vmware.api [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660447, 'name': PowerOffVM_Task, 'duration_secs': 0.371128} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.293883] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 812.294224] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 812.294423] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe7ae4f0-8cba-48b4-ab3d-d3df306e53b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.368095] env[65758]: DEBUG nova.network.neutron [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 812.380831] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 812.380831] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 812.380831] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Deleting the datastore file [datastore1] 1e249ca9-a7a8-440f-832b-a8f5d84ada8b {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.380831] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7806e58e-38c2-4049-95a3-db827e571cde {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.383695] env[65758]: DEBUG nova.compute.manager [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 812.396919] env[65758]: DEBUG oslo_vmware.api [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 812.396919] env[65758]: value = "task-4660450" [ 812.396919] env[65758]: _type = "Task" [ 812.396919] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.407707] env[65758]: DEBUG oslo_vmware.api [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660450, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.499909] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660448, 'name': ReconfigVM_Task, 'duration_secs': 0.36317} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.500398] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720/7c0e6911-4f85-4b47-a7e9-84d0e3bb5720.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.501226] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35014c88-48df-49a7-a330-a0a27c793291 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.509414] env[65758]: WARNING neutronclient.v2_0.client [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 812.510144] env[65758]: WARNING openstack [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 812.510563] env[65758]: WARNING openstack [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 812.529917] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 812.529917] env[65758]: value = "task-4660451" [ 812.529917] env[65758]: _type = "Task" [ 812.529917] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.546489] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660451, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.645545] env[65758]: DEBUG nova.network.neutron [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Updating instance_info_cache with network_info: [{"id": "06d61806-4596-49d1-968d-8ba5ad0b379d", "address": "fa:16:3e:b5:ab:9e", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06d61806-45", "ovs_interfaceid": "06d61806-4596-49d1-968d-8ba5ad0b379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 812.732007] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f64f0e-26f5-4925-a123-23b6149fc780 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.743320] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbff25aa-140d-4f02-a5f8-ee1d44e9d515 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.780823] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f691175a-7618-4c06-9a51-9a9d9e763b20 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.788066] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.788695] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.794612] env[65758]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bb8a30-c507-489a-bfbe-2c38ef79faa0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.810834] env[65758]: DEBUG nova.compute.provider_tree [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.911395] env[65758]: DEBUG oslo_vmware.api [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207638} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.911737] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 812.911800] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 812.912225] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 812.912225] env[65758]: INFO nova.compute.manager [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 812.912607] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 812.912813] env[65758]: DEBUG nova.compute.manager [-] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 812.912905] env[65758]: DEBUG nova.network.neutron [-] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 812.913199] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 812.913981] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 812.914263] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 812.967027] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 813.042376] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660451, 'name': Rename_Task, 'duration_secs': 0.172425} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.042705] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.043723] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65288083-76e7-4e77-b4f7-bf58e99469d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.051897] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 813.051897] env[65758]: value = "task-4660452" [ 813.051897] env[65758]: _type = "Task" [ 813.051897] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.061802] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660452, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.150212] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Releasing lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.150944] env[65758]: DEBUG nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Instance network_info: |[{"id": "06d61806-4596-49d1-968d-8ba5ad0b379d", "address": "fa:16:3e:b5:ab:9e", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06d61806-45", "ovs_interfaceid": "06d61806-4596-49d1-968d-8ba5ad0b379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 813.151357] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:ab:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9bc2632-36f9-4912-8782-8bbb789f909d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06d61806-4596-49d1-968d-8ba5ad0b379d', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 813.163869] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 813.164046] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 813.164298] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfe3f5a7-f11c-4490-acae-5b2ecba53eb0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.188943] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.188943] env[65758]: value = "task-4660453" [ 813.188943] env[65758]: _type = "Task" [ 813.188943] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.199913] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660453, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.314498] env[65758]: DEBUG nova.scheduler.client.report [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.402280] env[65758]: DEBUG nova.compute.manager [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 813.436150] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 813.436150] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 813.436354] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 813.436535] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 813.436658] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 813.436860] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 813.437089] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 813.437804] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 813.437804] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 
tempest-ServerShowV247Test-571414411-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 813.437973] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 813.438192] env[65758]: DEBUG nova.virt.hardware [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 813.439533] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08970fa8-1412-462b-b050-902cd3795738 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.452878] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a36f27d-d9f6-4f9c-8a1a-1044bbf50eab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.475941] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 813.481641] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Creating folder: Project (4ccf42aff94443239bf03f0aad58567f). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.482665] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0af3339-a415-4402-a19c-b8121e2ed6e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.499754] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Created folder: Project (4ccf42aff94443239bf03f0aad58567f) in parent group-v909763. [ 813.499754] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Creating folder: Instances. Parent ref: group-v909917. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.500275] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef7bfa13-0d83-4849-8565-5d06ad12e30b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.518821] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Created folder: Instances in parent group-v909917. 
[ 813.519059] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 813.519616] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 813.519938] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3146540-f8f1-47b0-b2b0-f374265e90bd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.541767] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.541767] env[65758]: value = "task-4660456" [ 813.541767] env[65758]: _type = "Task" [ 813.541767] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.552500] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660456, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.561815] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660452, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.700860] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660453, 'name': CreateVM_Task, 'duration_secs': 0.437983} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.701090] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 813.701635] env[65758]: WARNING neutronclient.v2_0.client [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 813.702051] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.702218] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.702602] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 813.702893] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f598cebd-a909-474f-9ce5-612eb6066288 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.711830] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 813.711830] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5214b107-2289-cdcc-fff4-9b34879fc153" [ 813.711830] env[65758]: _type = "Task" [ 813.711830] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.724035] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5214b107-2289-cdcc-fff4-9b34879fc153, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.824157] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.452s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.827627] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 40.599s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.827627] env[65758]: DEBUG nova.objects.instance [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 813.849729] env[65758]: DEBUG nova.compute.manager [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 813.851864] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8007d66f-9c49-49b9-9292-1bf4b51e9dfd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.860405] env[65758]: INFO nova.scheduler.client.report [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Deleted allocations for instance fb379346-f17a-4433-bb55-2b72025e9a61 [ 813.997247] env[65758]: DEBUG nova.network.neutron [-] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 814.053527] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660456, 'name': CreateVM_Task, 'duration_secs': 0.414773} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.053527] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.053527] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.053527] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.053919] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 814.054207] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28c75562-7563-45c5-b287-598714cb15cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.062674] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 814.062674] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529a055f-926a-4e57-6b39-59bda109459a" [ 814.062674] env[65758]: _type = "Task" [ 814.062674] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.066839] env[65758]: DEBUG oslo_vmware.api [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660452, 'name': PowerOnVM_Task, 'duration_secs': 0.569331} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.070348] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.070451] env[65758]: INFO nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Took 9.17 seconds to spawn the instance on the hypervisor. 
[ 814.070588] env[65758]: DEBUG nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 814.071398] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ef615f-0d2c-42c1-b599-2c71912c3cd4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.086710] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529a055f-926a-4e57-6b39-59bda109459a, 'name': SearchDatastore_Task, 'duration_secs': 0.010457} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.088125] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.088355] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.088700] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.088768] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.088912] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.089391] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72d5adf0-e6a3-41f7-844d-f71e4676ec78 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.099566] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.099743] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.101203] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16f8d37d-2d57-40dc-94ac-9670cfd07ad7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.107219] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 814.107219] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d06065-b656-7e9c-8ab5-fb8e94bc6247" [ 814.107219] env[65758]: _type = "Task" [ 814.107219] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.116825] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d06065-b656-7e9c-8ab5-fb8e94bc6247, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.223310] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5214b107-2289-cdcc-fff4-9b34879fc153, 'name': SearchDatastore_Task, 'duration_secs': 0.011522} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.223703] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.223962] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.224319] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.224508] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.224581] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.224966] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f761a60c-99e1-4198-a0c8-bb459d44b9f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.234511] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.234694] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.235445] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8572bfba-f7f9-4426-a00f-f7daef8f1a3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.241607] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 814.241607] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52881b02-940a-2667-6501-9512e328c695" [ 814.241607] env[65758]: _type = "Task" [ 814.241607] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.250255] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52881b02-940a-2667-6501-9512e328c695, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.276293] env[65758]: DEBUG nova.compute.manager [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Received event network-changed-06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 814.276596] env[65758]: DEBUG nova.compute.manager [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Refreshing instance network info cache due to event network-changed-06d61806-4596-49d1-968d-8ba5ad0b379d. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 814.276921] env[65758]: DEBUG oslo_concurrency.lockutils [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] Acquiring lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.277207] env[65758]: DEBUG oslo_concurrency.lockutils [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] Acquired lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.277388] env[65758]: DEBUG nova.network.neutron [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Refreshing network info cache for port 06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 814.369011] env[65758]: INFO nova.compute.manager [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] instance snapshotting [ 814.375723] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ec7136-7b66-4a5e-8e7c-1ecac4d3c3a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.379420] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bd84df20-d071-4d67-ae91-af24f8765ec5 tempest-ServerMetadataTestJSON-2086049374 tempest-ServerMetadataTestJSON-2086049374-project-member] Lock "fb379346-f17a-4433-bb55-2b72025e9a61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.507s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.401147] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36587c52-873d-4741-b5a2-b403dc396c03 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.502061] env[65758]: INFO nova.compute.manager [-] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Took 1.59 seconds to deallocate network for instance. [ 814.603355] env[65758]: INFO nova.compute.manager [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Took 51.91 seconds to build instance. [ 814.620255] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d06065-b656-7e9c-8ab5-fb8e94bc6247, 'name': SearchDatastore_Task, 'duration_secs': 0.009998} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.621873] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1901957-c3fa-4efd-bf59-db15f7c41392 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.628494] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 814.628494] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c856c3-28e0-620a-9a39-d7c7242eff27" [ 814.628494] env[65758]: _type = "Task" [ 814.628494] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.638105] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c856c3-28e0-620a-9a39-d7c7242eff27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.754327] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52881b02-940a-2667-6501-9512e328c695, 'name': SearchDatastore_Task, 'duration_secs': 0.010045} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.755430] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-719eda98-ad20-4e01-9b47-d505f6059add {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.763038] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 814.763038] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a26833-cc77-325f-c254-5c1c92941260" [ 814.763038] env[65758]: _type = "Task" [ 814.763038] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.772994] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a26833-cc77-325f-c254-5c1c92941260, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.780370] env[65758]: WARNING neutronclient.v2_0.client [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 814.781909] env[65758]: WARNING openstack [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 814.782293] env[65758]: WARNING openstack [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 814.837297] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1835d17e-100e-42cd-9797-4993a5d106ea tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.841185] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.361s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.841185] env[65758]: INFO nova.compute.claims [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.915296] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 814.915617] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7e115b81-ffd0-4103-8587-ca3bb4346c08 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.925458] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 814.925458] env[65758]: value = "task-4660457" [ 814.925458] env[65758]: _type = "Task" [ 814.925458] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.936581] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660457, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.008609] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.104936] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dc81e69b-e1a1-4879-a0f9-e6aa448d8d4a tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.017s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.142017] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c856c3-28e0-620a-9a39-d7c7242eff27, 'name': SearchDatastore_Task, 'duration_secs': 0.010401} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.142017] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.142017] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] d60aaa5c-913f-4550-a4d5-ab994048da9f/d60aaa5c-913f-4550-a4d5-ab994048da9f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.142017] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-100c0dee-c3ad-4bbc-858e-42323d162699 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.151227] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 815.151227] env[65758]: value = "task-4660458" [ 815.151227] env[65758]: _type = "Task" [ 815.151227] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.152681] env[65758]: WARNING neutronclient.v2_0.client [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 815.153565] env[65758]: WARNING openstack [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 815.154103] env[65758]: WARNING openstack [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 815.171696] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660458, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.277801] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a26833-cc77-325f-c254-5c1c92941260, 'name': SearchDatastore_Task, 'duration_secs': 0.009893} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.278166] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.278513] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 596a5005-3607-44a2-9c0e-f1a56865011c/596a5005-3607-44a2-9c0e-f1a56865011c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.278833] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b7a2544-1e81-4e6c-bb20-de638786af21 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.288160] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 815.288160] env[65758]: value = "task-4660459" [ 815.288160] env[65758]: _type = "Task" [ 815.288160] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.298568] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.301073] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.305019] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.308181] env[65758]: DEBUG nova.network.neutron [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Updated VIF entry in instance network info cache for port 06d61806-4596-49d1-968d-8ba5ad0b379d. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 815.308626] env[65758]: DEBUG nova.network.neutron [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Updating instance_info_cache with network_info: [{"id": "06d61806-4596-49d1-968d-8ba5ad0b379d", "address": "fa:16:3e:b5:ab:9e", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06d61806-45", "ovs_interfaceid": "06d61806-4596-49d1-968d-8ba5ad0b379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 815.437960] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] 
Task: {'id': task-4660457, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.607997] env[65758]: DEBUG nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 815.678550] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660458, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50377} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.678857] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] d60aaa5c-913f-4550-a4d5-ab994048da9f/d60aaa5c-913f-4550-a4d5-ab994048da9f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 815.679029] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 815.679379] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-768bfa9b-bafa-47e3-9c64-40978053c9fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.688504] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 815.688504] env[65758]: value = "task-4660460" [ 815.688504] env[65758]: _type = "Task" [ 815.688504] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.697885] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660460, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.802419] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660459, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.812124] env[65758]: DEBUG oslo_concurrency.lockutils [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] Releasing lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.812621] env[65758]: DEBUG nova.compute.manager [req-23999936-5e06-463a-8d62-2530751e60c9 req-35307119-87dd-4945-90a2-608c98d7bb00 service nova] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Received event network-vif-deleted-0e626ecf-0686-4626-9e0c-31a51751b185 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 815.941764] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660457, 'name': CreateSnapshot_Task, 'duration_secs': 0.51417} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.942029] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 815.943627] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1ef968-d8ad-4294-a423-889df876c518 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.133309] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.206246] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660460, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077155} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.207132] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.208151] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d5c3fd-eea8-4517-9558-838985f24f01 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.236700] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] d60aaa5c-913f-4550-a4d5-ab994048da9f/d60aaa5c-913f-4550-a4d5-ab994048da9f.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.239965] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0cf9b09-7a32-4c6c-b7b2-c51d4acf560a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.263011] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 816.263011] env[65758]: value = "task-4660461" [ 816.263011] env[65758]: _type = "Task" [ 816.263011] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.274307] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660461, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.299438] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.746783} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.303010] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 596a5005-3607-44a2-9c0e-f1a56865011c/596a5005-3607-44a2-9c0e-f1a56865011c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.303010] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.305038] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00c71bd1-5441-4ea8-bb16-20a15cf202d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.314257] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 816.314257] env[65758]: value = "task-4660462" [ 816.314257] env[65758]: _type = "Task" [ 816.314257] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.327047] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.471235] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 816.471487] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f983d91e-c426-44fa-bf46-b5a50303fa6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.484772] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 816.484772] env[65758]: value = "task-4660463" [ 816.484772] env[65758]: _type = "Task" [ 816.484772] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.497989] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660463, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.636081] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afed5203-f7f7-4870-a10f-2f0b0d88fe9d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.645834] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6643609f-449e-4390-8b54-7a24871783cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.679497] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7774b08-6e75-41f2-b45d-f7f04f3cdde7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.690844] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781f96d9-a939-4155-863e-4e1a4fc4271c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.706392] env[65758]: DEBUG nova.compute.provider_tree [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.783868] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660461, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.828422] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086827} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.828707] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.829627] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07869fa-261e-425a-9a3b-695836951bc2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.855638] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 596a5005-3607-44a2-9c0e-f1a56865011c/596a5005-3607-44a2-9c0e-f1a56865011c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.857630] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43af67e3-4d09-4c45-ab6d-5bc40e94d86a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.874925] env[65758]: DEBUG nova.compute.manager [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Received event network-changed-ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 816.875532] env[65758]: DEBUG nova.compute.manager [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Refreshing instance network info cache due to event network-changed-ec4eeaee-4c33-4f1c-93a9-038d455eff39. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 816.875532] env[65758]: DEBUG oslo_concurrency.lockutils [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] Acquiring lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.875768] env[65758]: DEBUG oslo_concurrency.lockutils [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] Acquired lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.875768] env[65758]: DEBUG nova.network.neutron [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Refreshing network info cache for port ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 816.885899] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 816.885899] env[65758]: value = "task-4660464" [ 816.885899] env[65758]: _type = "Task" [ 816.885899] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.897270] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660464, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.998301] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660463, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.210915] env[65758]: DEBUG nova.scheduler.client.report [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.279106] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660461, 'name': ReconfigVM_Task, 'duration_secs': 0.600662} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.279758] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Reconfigured VM instance instance-00000037 to attach disk [datastore1] d60aaa5c-913f-4550-a4d5-ab994048da9f/d60aaa5c-913f-4550-a4d5-ab994048da9f.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.280206] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2fbf3e5-38e6-459f-b3b5-e36d2a0df652 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.288890] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 817.288890] env[65758]: value = "task-4660465" [ 817.288890] env[65758]: _type = "Task" [ 817.288890] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.298890] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660465, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.379225] env[65758]: WARNING neutronclient.v2_0.client [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
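The repeated "Waiting for the task: (returnval){ ... }" and "progress is N%" records above are oslo.vmware's task-polling loop (wait_for_task / _poll_task in oslo_vmware/api.py). The following is an illustrative sketch, not part of this log, of how a caller typically drives that loop; the host, credentials and the PowerOnVM_Task call are placeholders rather than values from this deployment:

    # Illustrative sketch only; connection details are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',   # placeholder vCenter endpoint
        api_retry_count=10,
        task_poll_interval=0.5)                # interval between _poll_task checks

    # Pick some VM managed-object reference to act on (first one returned).
    retrieve_result = session.invoke_api(vim_util, 'get_objects',
                                         session.vim, 'VirtualMachine', 100)
    vm_ref = retrieve_result.objects[0].obj

    # Long-running vCenter operations return a Task moref immediately...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ...and wait_for_task() polls it (the "progress is N%" lines above),
    # raising if the task finishes in an error state.
    session.wait_for_task(task)
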
[ 817.380256] env[65758]: WARNING openstack [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 817.380670] env[65758]: WARNING openstack [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 817.390993] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "e6159a35-f073-4931-b0b0-832a88680356" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.390993] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.391324] env[65758]: INFO nova.compute.manager [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Rebooting instance [ 817.418141] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660464, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.498744] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660463, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.607801] env[65758]: WARNING neutronclient.v2_0.client [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 817.608466] env[65758]: WARNING openstack [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 817.608823] env[65758]: WARNING openstack [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 817.719726] env[65758]: DEBUG nova.network.neutron [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Updated VIF entry in instance network info cache for port ec4eeaee-4c33-4f1c-93a9-038d455eff39. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 817.720282] env[65758]: DEBUG nova.network.neutron [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Updating instance_info_cache with network_info: [{"id": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "address": "fa:16:3e:7b:cc:ba", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec4eeaee-4c", "ovs_interfaceid": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 817.722705] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.885s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.723219] env[65758]: DEBUG nova.compute.manager [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 817.725919] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.199s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.728240] env[65758]: INFO nova.compute.claims [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.803121] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660465, 'name': Rename_Task, 'duration_secs': 0.26092} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.803518] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.803874] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88f648d3-a15c-4721-bdba-f4cb98b247de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.813337] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 817.813337] env[65758]: value = "task-4660466" [ 817.813337] env[65758]: _type = "Task" [ 817.813337] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.824465] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660466, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.903979] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660464, 'name': ReconfigVM_Task, 'duration_secs': 0.879837} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.904340] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 596a5005-3607-44a2-9c0e-f1a56865011c/596a5005-3607-44a2-9c0e-f1a56865011c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.905039] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df308d80-52bc-4776-9819-0048f50f947e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.912734] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 817.912734] env[65758]: value = "task-4660467" [ 817.912734] env[65758]: _type = "Task" [ 817.912734] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.923589] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660467, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.935802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.935981] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.936167] env[65758]: DEBUG nova.network.neutron [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 817.996664] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660463, 'name': CloneVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.235234] env[65758]: DEBUG oslo_concurrency.lockutils [req-f91059d3-d7f4-4bca-a59d-bd6b44afd695 req-82ff5b18-33d2-47b0-92dd-3b4b47551a73 service nova] Releasing lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.236456] env[65758]: DEBUG nova.compute.utils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 818.237897] env[65758]: DEBUG nova.compute.manager [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Not allocating networking since 'none' was specified. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 818.324478] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660466, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.423408] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660467, 'name': Rename_Task, 'duration_secs': 0.187365} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.424209] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.424209] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-440d3303-46f6-4a57-80d3-d9de3a2adbef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.431907] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 818.431907] env[65758]: value = "task-4660468" [ 818.431907] env[65758]: _type = "Task" [ 818.431907] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.441030] env[65758]: WARNING neutronclient.v2_0.client [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
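The Acquiring / Acquired / Releasing records around the "refresh_cache-<uuid>" and "compute_resources" lock names above come from oslo.concurrency's lockutils. A minimal sketch of the two usage patterns visible here; only the lock names echo the log, the guarded work is a placeholder:

    # Illustrative sketch only; the guarded work is elided.
    from oslo_concurrency import lockutils

    instance_uuid = '7c0e6911-4f85-4b47-a7e9-84d0e3bb5720'  # example UUID from the log

    # Context-manager form: logs "Acquiring"/"Acquired" on entry and
    # "Releasing" on exit (lockutils.py:313/316/334 in the records above).
    with lockutils.lock('refresh_cache-' + instance_uuid):
        pass  # refresh the instance's network info cache here

    # Decorator form: logs 'acquired by'/'released by' with the wrapped
    # function's qualified name plus the time waited and held.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # claim resources while holding the lock
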
[ 818.441338] env[65758]: WARNING openstack [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 818.441788] env[65758]: WARNING openstack [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 818.448982] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660468, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.497255] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660463, 'name': CloneVM_Task, 'duration_secs': 1.583262} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.497370] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Created linked-clone VM from snapshot [ 818.498340] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03caf39f-03a6-41ea-9ee3-9596f12c8c0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.506651] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Uploading image 08f49cb1-96c5-42f1-91f7-58aef5d61a5b {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 818.536692] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 818.536692] env[65758]: value = "vm-909921" [ 818.536692] env[65758]: _type = "VirtualMachine" [ 818.536692] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 818.536934] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-700b291b-a11f-47cc-a71c-b097523bcb04 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.544664] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lease: (returnval){ [ 818.544664] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52498346-9253-e0ea-8f72-588688ed358d" [ 818.544664] env[65758]: _type = "HttpNfcLease" [ 818.544664] env[65758]: } obtained for exporting VM: (result){ [ 818.544664] env[65758]: value = "vm-909921" [ 818.544664] env[65758]: _type = "VirtualMachine" [ 818.544664] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 818.544982] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the lease: (returnval){ [ 818.544982] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52498346-9253-e0ea-8f72-588688ed358d" [ 818.544982] env[65758]: _type = "HttpNfcLease" [ 818.544982] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 818.552994] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 818.552994] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52498346-9253-e0ea-8f72-588688ed358d" [ 818.552994] env[65758]: _type = "HttpNfcLease" [ 818.552994] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 818.630903] env[65758]: WARNING neutronclient.v2_0.client [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 818.631614] env[65758]: WARNING openstack [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 818.631976] env[65758]: WARNING openstack [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 818.742546] env[65758]: DEBUG nova.compute.manager [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 818.749939] env[65758]: DEBUG nova.network.neutron [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", "ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 818.827308] env[65758]: DEBUG oslo_vmware.api [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660466, 'name': PowerOnVM_Task, 'duration_secs': 0.814919} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.827308] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.827308] env[65758]: INFO nova.compute.manager [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Took 5.42 seconds to spawn the instance on the hypervisor. [ 818.827308] env[65758]: DEBUG nova.compute.manager [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 818.827864] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea1c5d0-9a05-4d06-81ce-bd74fd9f2ad6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.887340] env[65758]: DEBUG nova.compute.manager [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Received event network-changed-ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 818.887449] env[65758]: DEBUG nova.compute.manager [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Refreshing instance network info cache due to event network-changed-ec4eeaee-4c33-4f1c-93a9-038d455eff39. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 818.887827] env[65758]: DEBUG oslo_concurrency.lockutils [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] Acquiring lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.887827] env[65758]: DEBUG oslo_concurrency.lockutils [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] Acquired lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.887954] env[65758]: DEBUG nova.network.neutron [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Refreshing network info cache for port ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 818.944928] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660468, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.053761] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 819.053761] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52498346-9253-e0ea-8f72-588688ed358d" [ 819.053761] env[65758]: _type = "HttpNfcLease" [ 819.053761] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 819.054112] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 819.054112] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52498346-9253-e0ea-8f72-588688ed358d" [ 819.054112] env[65758]: _type = "HttpNfcLease" [ 819.054112] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 819.054823] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba18e834-6ab7-42ad-aa93-b2ae4b0a709c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.066017] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d17b25-70f3-5029-9bfb-d0fd42ea561c/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 819.066220] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d17b25-70f3-5029-9bfb-d0fd42ea561c/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 819.167030] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-12a9ac6c-a31b-4bb3-bf2e-4cdfdfc3f1e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.256754] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.355489] env[65758]: INFO nova.compute.manager [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Took 50.61 seconds to build instance. [ 819.390564] env[65758]: WARNING neutronclient.v2_0.client [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
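The ExportVm / HttpNfcLease records above (lease requested, "is initializing", "is ready", then "Found VMDK URL") are the image-upload path: the linked-clone VM is exported through an NFC lease and its disk is read over HTTPS. A rough sketch of that flow with oslo.vmware, reusing the placeholder session and vm_ref from the earlier sketch; error handling, lease progress updates and the actual streaming to Glance are omitted:

    # Illustrative sketch only; 'session' and 'vm_ref' as in the earlier sketch.
    from oslo_vmware import vim_util

    # Ask vCenter for an export lease on the VM.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

    # Poll until the lease leaves "initializing" (wait_for_lease_ready /
    # _poll_lease in the records above).
    session.wait_for_lease_ready(lease)

    # The lease info carries one URL per exported device; the .vmdk one is
    # what gets streamed out as the stream-optimized image data.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_url = next(device.url for device in lease_info.deviceUrl
                    if device.url.endswith('.vmdk'))
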
[ 819.392068] env[65758]: WARNING openstack [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 819.392068] env[65758]: WARNING openstack [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 819.455506] env[65758]: DEBUG oslo_vmware.api [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660468, 'name': PowerOnVM_Task, 'duration_secs': 0.852891} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.455506] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.455506] env[65758]: INFO nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Took 8.77 seconds to spawn the instance on the hypervisor. 
[ 819.455959] env[65758]: DEBUG nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 819.456534] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28f8414-b691-408c-a966-2a0371f00108 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.495591] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfeb75d-4f9a-4029-8213-0eb5c68f9d3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.505167] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f337fbe5-76a7-4333-9d55-9f7340e45b3e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.547138] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5625869d-8e46-4042-b139-868d4eb6ae7c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.558474] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d613eb3-836e-4ed7-97e4-6554dd02d801 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.576411] env[65758]: DEBUG nova.compute.provider_tree [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.673846] env[65758]: WARNING neutronclient.v2_0.client [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 819.674369] env[65758]: WARNING openstack [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 819.674720] env[65758]: WARNING openstack [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 819.762734] env[65758]: DEBUG nova.compute.manager [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 819.769105] env[65758]: DEBUG nova.compute.manager [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 819.770126] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcfd244-3e59-4a94-96d6-7ac694f920b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.797879] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 819.797879] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 819.798073] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 819.798217] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 819.798355] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 819.798587] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 819.798936] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 819.799111] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 819.799345] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 819.799580] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 819.799814] env[65758]: DEBUG nova.virt.hardware [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 819.803386] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f458e684-8439-41dc-899d-558ca7c39c8e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.811580] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7668f267-e0fb-42de-b70e-e3c585d841ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.827171] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.832779] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 819.835889] env[65758]: DEBUG nova.network.neutron [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Updated VIF entry in instance network info cache for port ec4eeaee-4c33-4f1c-93a9-038d455eff39. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 819.836340] env[65758]: DEBUG nova.network.neutron [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Updating instance_info_cache with network_info: [{"id": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "address": "fa:16:3e:7b:cc:ba", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec4eeaee-4c", "ovs_interfaceid": "ec4eeaee-4c33-4f1c-93a9-038d455eff39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 819.837681] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 819.838489] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9826e7f8-05fb-464c-b76e-1bd5e68859d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.858648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-24dc0977-2b0d-4002-9557-665641740daa tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "d60aaa5c-913f-4550-a4d5-ab994048da9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.710s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.859358] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.859358] env[65758]: value = "task-4660470" [ 819.859358] env[65758]: _type = "Task" [ 819.859358] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.871200] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660470, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.981258] env[65758]: INFO nova.compute.manager [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Took 55.23 seconds to build instance. 
[ 820.085534] env[65758]: DEBUG nova.scheduler.client.report [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.339306] env[65758]: DEBUG oslo_concurrency.lockutils [req-906b1553-5119-464b-9a40-01e667ee3c8d req-fc8de2fe-f148-4033-9a09-29852ccaec17 service nova] Releasing lock "refresh_cache-7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.370042] env[65758]: DEBUG nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 820.382865] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660470, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.485554] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6293cea6-9871-4607-830e-79790e0b586d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "596a5005-3607-44a2-9c0e-f1a56865011c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.258s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.590311] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.864s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.591066] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 820.594586] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.523s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.595058] env[65758]: DEBUG nova.objects.instance [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lazy-loading 'resources' on Instance uuid 54db018a-d54c-4fe5-9a6e-600e801e00b0 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 820.789115] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b483f2-cfe0-46cc-9e71-5aac1acbfd8a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.798508] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Doing hard reboot of VM {{(pid=65758) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 820.798856] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-4f6e9192-72e9-4d73-98a5-ead1ee272e1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.808455] env[65758]: DEBUG oslo_vmware.api [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 820.808455] env[65758]: value = "task-4660471" [ 820.808455] env[65758]: _type = "Task" [ 820.808455] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.818543] env[65758]: DEBUG oslo_vmware.api [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660471, 'name': ResetVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.852696] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "875cbc88-f817-4ea8-a969-b97e875918d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.853239] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "875cbc88-f817-4ea8-a969-b97e875918d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.877348] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660470, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.895767] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.989166] env[65758]: DEBUG nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 821.096603] env[65758]: DEBUG nova.compute.utils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.098554] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 821.098761] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 821.099107] env[65758]: WARNING neutronclient.v2_0.client [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
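The lockutils entries around this point repeat a three-step pattern: 'Acquiring lock X by Y', 'Lock X acquired ... waited N s', and later 'Lock X released ... held N s' (for example the "compute_resources" lock held by instance_claim and update_usage). A minimal sketch of that usage is below, assuming oslo.concurrency's lockutils.lock() context manager; the function name and the work done under the lock are placeholders, not the code that produced these lines.

    import time
    from oslo_concurrency import lockutils

    def claim_resources(instance_uuid):
        """Illustrative stand-in for a resource-tracker claim guarded by a lock."""
        start = time.monotonic()
        # Serializes concurrent builds the way the "compute_resources" lock in
        # the log does; waiters block here, which is where figures such as
        # "waited 39.523s" come from.
        with lockutils.lock('compute_resources'):
            waited = time.monotonic() - start
            print(f'Lock "compute_resources" acquired :: waited {waited:.3f}s')
            # ... perform the claim for instance_uuid under the lock ...
        held = time.monotonic() - start - waited
        print(f'Lock "compute_resources" released :: held {held:.3f}s')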
[ 821.099424] env[65758]: WARNING neutronclient.v2_0.client [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 821.100037] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 821.100369] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 821.167021] env[65758]: DEBUG nova.policy [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68c293475b1641bda0a7f5a13f1cdbe1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4154e353eb4142178244814f4ebd6167', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 821.324795] env[65758]: DEBUG oslo_vmware.api [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660471, 'name': ResetVM_Task, 'duration_secs': 0.114474} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.326259] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Did hard reboot of VM {{(pid=65758) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 821.326680] env[65758]: DEBUG nova.compute.manager [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 821.327761] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df7a9ce-80a4-439a-b29c-0ffcb793dbeb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.357620] env[65758]: DEBUG nova.compute.manager [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Received event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 821.357889] env[65758]: DEBUG nova.compute.manager [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing instance network info cache due to event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 821.358032] env[65758]: DEBUG oslo_concurrency.lockutils [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] Acquiring lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.358175] env[65758]: DEBUG oslo_concurrency.lockutils [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] Acquired lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.358334] env[65758]: DEBUG nova.network.neutron [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 821.372564] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660470, 'name': CreateVM_Task, 'duration_secs': 1.448976} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.375381] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.376273] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.376469] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.376731] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 821.377249] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccde952f-8c3c-4656-8b16-897066a1dd05 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.385223] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 821.385223] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ef71d4-5649-a54d-0206-c501c9d497c6" [ 821.385223] env[65758]: _type = "Task" [ 821.385223] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.397147] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ef71d4-5649-a54d-0206-c501c9d497c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.512686] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.592708] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Successfully created port: 390c38a1-9300-466c-ab54-f85316e00181 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 821.600719] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 821.840062] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c457afc-b3d9-4ae1-ba5e-86bc9564088b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.848641] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ba7f577-d70e-428c-8c1f-07fe41c21e6c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.458s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.854080] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2b81cc-a0e3-4cd3-8e89-6f33b46c0e8d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.883975] env[65758]: WARNING neutronclient.v2_0.client [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 821.884727] env[65758]: WARNING openstack [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 821.885098] env[65758]: WARNING openstack [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 821.894652] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afe865a-b1eb-415e-96db-9319f8890eca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.907670] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ef71d4-5649-a54d-0206-c501c9d497c6, 'name': SearchDatastore_Task, 'duration_secs': 0.014623} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.910585] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.910885] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.911134] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.911278] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.911480] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.911799] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87346fd5-4850-41e7-800a-d64270151ae6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.915045] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf26ae6a-c8e2-4815-98fa-b60e31ab8409 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.934260] env[65758]: DEBUG nova.compute.provider_tree [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.937842] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.938153] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.939443] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bd17c77-7fb8-438d-904e-baf276ef693f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.947129] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 821.947129] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a2ae0e-2dd2-8414-bc75-362a78698472" [ 821.947129] env[65758]: _type = "Task" [ 821.947129] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.957753] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a2ae0e-2dd2-8414-bc75-362a78698472, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.440516] env[65758]: DEBUG nova.scheduler.client.report [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 822.459870] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a2ae0e-2dd2-8414-bc75-362a78698472, 'name': SearchDatastore_Task, 'duration_secs': 0.011495} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.461467] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d04e026-f19c-4b49-922b-689d2b1fbc62 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.468844] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 822.468844] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526c7fee-1b32-a9e3-3343-c16f7a7c0336" [ 822.468844] env[65758]: _type = "Task" [ 822.468844] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.479528] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526c7fee-1b32-a9e3-3343-c16f7a7c0336, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.616531] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 822.645344] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 822.645683] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 822.645914] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 822.646152] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 822.646565] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 822.646565] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 822.646780] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 822.646981] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 822.647258] env[65758]: DEBUG nova.virt.hardware [None 
req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 822.647484] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 822.647701] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 822.648704] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016ac535-1bc5-459f-a498-096d642c63bd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.658515] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f25455-0101-464b-a0f4-39181db7cb52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.948873] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.354s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.954241] env[65758]: DEBUG oslo_concurrency.lockutils [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.785s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.954736] env[65758]: DEBUG nova.objects.instance [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lazy-loading 'resources' on Instance uuid a0a9d947-f2ad-4a35-b336-1486c9a76b06 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.987333] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526c7fee-1b32-a9e3-3343-c16f7a7c0336, 'name': SearchDatastore_Task, 'duration_secs': 0.011038} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.987333] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.987333] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5/548edde0-9e42-4cd3-bdd3-3615ab9b7fc5.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.987333] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a0b5bb4-0cfc-4606-8211-9479c85087ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.994971] env[65758]: INFO nova.scheduler.client.report [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted allocations for instance 54db018a-d54c-4fe5-9a6e-600e801e00b0 [ 823.001138] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 823.001138] env[65758]: value = "task-4660472" [ 823.001138] env[65758]: _type = "Task" [ 823.001138] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.011500] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.130471] env[65758]: WARNING neutronclient.v2_0.client [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
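The hardware.py entries above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") reduce to enumerating socket/core/thread factorizations of the vCPU count within the logged maxima. The sketch below reproduces that enumeration for illustration only; it is not Nova's _get_possible_cpu_topologies, and the 65536 defaults are taken from the "maximum VirtCPUTopology" line in this log.

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product is `vcpus`."""
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // sockets // cores
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For the 1-vCPU m1.nano flavor in the log there is exactly one candidate:
    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]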
[ 823.131274] env[65758]: WARNING openstack [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 823.131694] env[65758]: WARNING openstack [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 823.188664] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Successfully updated port: 390c38a1-9300-466c-ab54-f85316e00181 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 823.291427] env[65758]: DEBUG nova.network.neutron [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updated VIF entry in instance network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 823.291802] env[65758]: DEBUG nova.network.neutron [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updating instance_info_cache with network_info: [{"id": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "address": "fa:16:3e:f8:f1:bd", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap972faaf3-0e", "ovs_interfaceid": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 823.509786] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d26b2029-1414-48d6-ac08-9ae5b60d4ecc tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "54db018a-d54c-4fe5-9a6e-600e801e00b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.650s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.526764] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523582} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.526764] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5/548edde0-9e42-4cd3-bdd3-3615ab9b7fc5.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.526961] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.527942] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1aa9dd73-a6d1-44c3-bf38-240b5a67ab41 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.541564] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 823.541564] env[65758]: value = "task-4660473" [ 823.541564] env[65758]: _type = "Task" [ 823.541564] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.560785] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660473, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.696747] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "refresh_cache-c1b9d81e-d747-4665-a083-26d8383f7645" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.696747] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "refresh_cache-c1b9d81e-d747-4665-a083-26d8383f7645" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.697398] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 823.795969] env[65758]: DEBUG oslo_concurrency.lockutils [req-0faa73e1-3167-4f80-b108-77239c6f3699 req-b31756af-fa1e-419c-bdf8-2f4598ed7632 service nova] Releasing lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.048770] env[65758]: DEBUG nova.compute.manager [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Received event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 824.048770] env[65758]: DEBUG nova.compute.manager [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing instance network info cache due to event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 824.049680] env[65758]: DEBUG oslo_concurrency.lockutils [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] Acquiring lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.049680] env[65758]: DEBUG oslo_concurrency.lockutils [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] Acquired lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.049680] env[65758]: DEBUG nova.network.neutron [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 824.060824] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069882} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.061761] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.062978] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a773b6a1-3c04-4984-a84f-5be51be0e713 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.073676] env[65758]: DEBUG nova.compute.manager [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Received event network-vif-plugged-390c38a1-9300-466c-ab54-f85316e00181 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 824.073727] env[65758]: DEBUG oslo_concurrency.lockutils [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Acquiring lock "c1b9d81e-d747-4665-a083-26d8383f7645-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.073901] env[65758]: DEBUG oslo_concurrency.lockutils [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Lock "c1b9d81e-d747-4665-a083-26d8383f7645-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.074075] env[65758]: DEBUG oslo_concurrency.lockutils [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Lock "c1b9d81e-d747-4665-a083-26d8383f7645-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.074293] env[65758]: DEBUG nova.compute.manager [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] No waiting events found dispatching network-vif-plugged-390c38a1-9300-466c-ab54-f85316e00181 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 824.076437] env[65758]: WARNING nova.compute.manager [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Received unexpected event network-vif-plugged-390c38a1-9300-466c-ab54-f85316e00181 for instance with vm_state building and task_state spawning. [ 824.076437] env[65758]: DEBUG nova.compute.manager [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Received event network-changed-390c38a1-9300-466c-ab54-f85316e00181 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 824.076437] env[65758]: DEBUG nova.compute.manager [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Refreshing instance network info cache due to event network-changed-390c38a1-9300-466c-ab54-f85316e00181. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 824.076437] env[65758]: DEBUG oslo_concurrency.lockutils [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Acquiring lock "refresh_cache-c1b9d81e-d747-4665-a083-26d8383f7645" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.097994] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5/548edde0-9e42-4cd3-bdd3-3615ab9b7fc5.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.103324] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-286e9ec2-30c5-4d70-b981-6942c0810990 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.127129] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 824.127129] env[65758]: value = "task-4660474" [ 824.127129] env[65758]: _type = "Task" [ 824.127129] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.140788] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660474, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.158549] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a326b3e-bf58-4ebc-9947-634c4164cb77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.171518] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cdfdd5-0830-4432-9be7-fbfaec6beaa9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.209515] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 824.209941] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 824.218397] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828a1a7f-81a7-415a-a891-4989f8a77c6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.227590] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d68bda-e9d3-4403-99ef-89f73dde26e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.243466] env[65758]: DEBUG nova.compute.provider_tree [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.254213] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 824.348420] env[65758]: WARNING neutronclient.v2_0.client [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
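The recurring "Inventory has not changed for provider ..." entries carry the provider's resource inventory: VCPU total=48 with allocation_ratio=4.0, MEMORY_MB total=196590 with reserved=512, and DISK_GB total=200 but max_unit=95. The short worked example below shows how usable capacity falls out of those fields, assuming the standard (total - reserved) * allocation_ratio formula; it is a hedged illustration of the arithmetic, not the scheduler report client itself.

    # Inventory values copied from the scheduler report lines in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
    }

    for rc, inv in inventory.items():
        # Assumed capacity formula: (total - reserved) * allocation_ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # max_unit caps how much a single allocation (one instance) may request.
        print(f"{rc}: capacity={capacity:g}, largest single allocation={inv['max_unit']}")

With these numbers the provider can oversubscribe CPU to 192 VCPU of allocations while a single instance is still limited to 16 VCPU, 65530 MB of RAM, and 95 GB of disk.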
[ 824.349417] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 824.349972] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 824.450262] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Updating instance_info_cache with network_info: [{"id": "390c38a1-9300-466c-ab54-f85316e00181", "address": "fa:16:3e:a7:2b:c5", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap390c38a1-93", "ovs_interfaceid": "390c38a1-9300-466c-ab54-f85316e00181", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 824.556840] env[65758]: WARNING neutronclient.v2_0.client [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 824.556840] env[65758]: WARNING openstack [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 824.556840] env[65758]: WARNING openstack [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 824.623369] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "9e007d55-0a5c-4469-a546-9b18e188bea0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.623599] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "9e007d55-0a5c-4469-a546-9b18e188bea0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.623939] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "9e007d55-0a5c-4469-a546-9b18e188bea0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.624118] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "9e007d55-0a5c-4469-a546-9b18e188bea0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.624276] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "9e007d55-0a5c-4469-a546-9b18e188bea0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.626628] env[65758]: INFO nova.compute.manager [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Terminating instance [ 824.646263] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 
tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660474, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.747557] env[65758]: DEBUG nova.scheduler.client.report [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 824.781968] env[65758]: WARNING neutronclient.v2_0.client [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 824.782758] env[65758]: WARNING openstack [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 824.783177] env[65758]: WARNING openstack [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 824.889375] env[65758]: DEBUG nova.network.neutron [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updated VIF entry in instance network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 824.889749] env[65758]: DEBUG nova.network.neutron [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updating instance_info_cache with network_info: [{"id": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "address": "fa:16:3e:f8:f1:bd", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap972faaf3-0e", "ovs_interfaceid": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 824.952164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "refresh_cache-c1b9d81e-d747-4665-a083-26d8383f7645" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.952652] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Instance network_info: |[{"id": "390c38a1-9300-466c-ab54-f85316e00181", "address": "fa:16:3e:a7:2b:c5", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap390c38a1-93", "ovs_interfaceid": "390c38a1-9300-466c-ab54-f85316e00181", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 824.953052] 
env[65758]: DEBUG oslo_concurrency.lockutils [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Acquired lock "refresh_cache-c1b9d81e-d747-4665-a083-26d8383f7645" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.953197] env[65758]: DEBUG nova.network.neutron [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Refreshing network info cache for port 390c38a1-9300-466c-ab54-f85316e00181 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 824.954501] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:2b:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '085fb0ff-9285-4f1d-a008-a14da4844357', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '390c38a1-9300-466c-ab54-f85316e00181', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 824.963192] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 824.963705] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 824.964310] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec093298-9132-4721-b1e2-71420b155e7c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.986063] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.986063] env[65758]: value = "task-4660475" [ 824.986063] env[65758]: _type = "Task" [ 824.986063] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.995063] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660475, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.139140] env[65758]: DEBUG nova.compute.manager [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 825.139441] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 825.139677] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660474, 'name': ReconfigVM_Task, 'duration_secs': 0.574018} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.140605] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db2106e-84dc-4c5c-b6c9-f7d7dcb5f637 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.144724] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5/548edde0-9e42-4cd3-bdd3-3615ab9b7fc5.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.145576] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c7f662c-3b4c-4d77-b731-78471d124c2b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.156737] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 825.156737] env[65758]: value = "task-4660476" [ 825.156737] env[65758]: _type = "Task" [ 825.156737] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.157082] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 825.157442] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2637926e-2c81-4092-8a26-894b1db95de7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.169844] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660476, 'name': Rename_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.171940] env[65758]: DEBUG oslo_vmware.api [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 825.171940] env[65758]: value = "task-4660477" [ 825.171940] env[65758]: _type = "Task" [ 825.171940] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.182809] env[65758]: DEBUG oslo_vmware.api [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.253707] env[65758]: DEBUG oslo_concurrency.lockutils [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.299s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.258130] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.810s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.259984] env[65758]: INFO nova.compute.claims [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.296145] env[65758]: INFO nova.scheduler.client.report [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted allocations for instance a0a9d947-f2ad-4a35-b336-1486c9a76b06 [ 825.393135] env[65758]: DEBUG oslo_concurrency.lockutils [req-06575ce4-6aa9-407b-b55c-26f40b9eccf7 req-f0f0f84f-0c25-4528-b5c1-3c0d91302c61 service nova] Releasing lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.466418] env[65758]: WARNING neutronclient.v2_0.client [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 825.467562] env[65758]: WARNING openstack [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 825.467968] env[65758]: WARNING openstack [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 825.498308] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660475, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.666092] env[65758]: WARNING neutronclient.v2_0.client [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 825.666692] env[65758]: WARNING openstack [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 825.667013] env[65758]: WARNING openstack [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 825.687588] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660476, 'name': Rename_Task, 'duration_secs': 0.260033} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.691146] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.691146] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e3cd19b-0cbb-4b2a-aca5-6adbb638f340 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.699209] env[65758]: DEBUG oslo_vmware.api [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660477, 'name': PowerOffVM_Task, 'duration_secs': 0.432285} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.700296] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 825.700903] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 825.701070] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0be1358-2f38-4746-b1f6-254cde8a09ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.708928] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 825.708928] env[65758]: value = "task-4660478" [ 825.708928] env[65758]: _type = "Task" [ 825.708928] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.725690] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660478, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.803894] env[65758]: DEBUG nova.network.neutron [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Updated VIF entry in instance network info cache for port 390c38a1-9300-466c-ab54-f85316e00181. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 825.804163] env[65758]: DEBUG nova.network.neutron [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Updating instance_info_cache with network_info: [{"id": "390c38a1-9300-466c-ab54-f85316e00181", "address": "fa:16:3e:a7:2b:c5", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap390c38a1-93", "ovs_interfaceid": "390c38a1-9300-466c-ab54-f85316e00181", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 825.807991] env[65758]: DEBUG oslo_concurrency.lockutils [None req-89b14d7d-fac5-4177-8a8f-d9cc634496df tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a0a9d947-f2ad-4a35-b336-1486c9a76b06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.178s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.943863] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 825.944170] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 825.944388] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleting the datastore file [datastore1] 9e007d55-0a5c-4469-a546-9b18e188bea0 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 825.944738] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d83a69cd-69ef-4dec-841f-b3b85c1ff7f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.953334] env[65758]: DEBUG oslo_vmware.api [None req-39783a45-ba0c-4801-843d-e257945f52fb 
tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 825.953334] env[65758]: value = "task-4660480" [ 825.953334] env[65758]: _type = "Task" [ 825.953334] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.963290] env[65758]: DEBUG oslo_vmware.api [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660480, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.998702] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660475, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.148670] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "596a5005-3607-44a2-9c0e-f1a56865011c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.150020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "596a5005-3607-44a2-9c0e-f1a56865011c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.150020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "596a5005-3607-44a2-9c0e-f1a56865011c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.150020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "596a5005-3607-44a2-9c0e-f1a56865011c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.150020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "596a5005-3607-44a2-9c0e-f1a56865011c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.153259] env[65758]: INFO nova.compute.manager [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] 
[instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Terminating instance [ 826.211563] env[65758]: DEBUG nova.compute.manager [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Received event network-changed-06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 826.211742] env[65758]: DEBUG nova.compute.manager [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Refreshing instance network info cache due to event network-changed-06d61806-4596-49d1-968d-8ba5ad0b379d. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 826.211951] env[65758]: DEBUG oslo_concurrency.lockutils [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Acquiring lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.212158] env[65758]: DEBUG oslo_concurrency.lockutils [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Acquired lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.212393] env[65758]: DEBUG nova.network.neutron [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Refreshing network info cache for port 06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 826.225528] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660478, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.312098] env[65758]: DEBUG oslo_concurrency.lockutils [req-c37d928e-296d-4bad-b6b7-508b57ec4935 req-f28ea3f6-febc-4eee-baba-18fd2311d9c5 service nova] Releasing lock "refresh_cache-c1b9d81e-d747-4665-a083-26d8383f7645" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.473979] env[65758]: DEBUG oslo_vmware.api [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158543} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.474391] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 826.474391] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 826.475023] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 826.475023] env[65758]: INFO nova.compute.manager [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Took 1.34 seconds to destroy the instance on the hypervisor. [ 826.475023] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 826.475394] env[65758]: DEBUG nova.compute.manager [-] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 826.475816] env[65758]: DEBUG nova.network.neutron [-] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 826.476104] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 826.477084] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 826.477283] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 826.500097] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660475, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.567102] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 826.658714] env[65758]: DEBUG nova.compute.manager [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 826.658952] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.659850] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda06e78-c606-4d61-8839-a36afdc0c9e8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.668464] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.668816] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b9ca229-0db5-411c-b84f-e74eb3446333 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.679081] env[65758]: DEBUG oslo_vmware.api [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 826.679081] env[65758]: value = "task-4660481" [ 826.679081] env[65758]: _type = "Task" [ 826.679081] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.688762] env[65758]: DEBUG oslo_vmware.api [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660481, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.720447] env[65758]: WARNING neutronclient.v2_0.client [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 826.721366] env[65758]: WARNING openstack [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 826.721658] env[65758]: WARNING openstack [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 826.735931] env[65758]: DEBUG oslo_vmware.api [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660478, 'name': PowerOnVM_Task, 'duration_secs': 0.831268} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.737415] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.737415] env[65758]: INFO nova.compute.manager [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Took 6.97 seconds to spawn the instance on the hypervisor. [ 826.737415] env[65758]: DEBUG nova.compute.manager [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 826.737947] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830aeb0a-1f13-4526-a84a-747788f12364 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.761385] env[65758]: DEBUG nova.compute.manager [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Received event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 826.761643] env[65758]: DEBUG nova.compute.manager [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing instance network info cache due to event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 826.761643] env[65758]: DEBUG oslo_concurrency.lockutils [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] Acquiring lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.762095] env[65758]: DEBUG oslo_concurrency.lockutils [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] Acquired lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.762095] env[65758]: DEBUG nova.network.neutron [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 827.008191] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660475, 'name': CreateVM_Task, 'duration_secs': 1.532291} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.010032] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 827.010032] env[65758]: WARNING neutronclient.v2_0.client [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 827.010032] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.010032] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.010032] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 827.010322] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bebfa30-b505-46b2-afd5-83ac8c160bf1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.019753] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 827.019753] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc15ce-78d9-64ab-be64-5d225d6417ec" [ 827.019753] env[65758]: _type = "Task" [ 827.019753] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.034861] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc15ce-78d9-64ab-be64-5d225d6417ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.043402] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc26fbf-b114-47de-9faf-82f1e50c874f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.051739] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0098200-56cc-437d-a8cd-90d88dc06d59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.094483] env[65758]: WARNING neutronclient.v2_0.client [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 827.095159] env[65758]: WARNING openstack [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 827.095642] env[65758]: WARNING openstack [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 827.106652] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cac39ac-27a0-4b03-a666-0db5e86e5824 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.116422] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4151ae7-bc7c-4653-b287-94c4b550b849 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.133331] env[65758]: DEBUG nova.compute.provider_tree [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.192924] env[65758]: DEBUG oslo_vmware.api [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660481, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.195267] env[65758]: DEBUG nova.network.neutron [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Updated VIF entry in instance network info cache for port 06d61806-4596-49d1-968d-8ba5ad0b379d. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 827.195634] env[65758]: DEBUG nova.network.neutron [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Updating instance_info_cache with network_info: [{"id": "06d61806-4596-49d1-968d-8ba5ad0b379d", "address": "fa:16:3e:b5:ab:9e", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06d61806-45", "ovs_interfaceid": "06d61806-4596-49d1-968d-8ba5ad0b379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 827.262216] env[65758]: INFO nova.compute.manager [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Took 50.80 seconds to build instance. [ 827.268401] env[65758]: WARNING neutronclient.v2_0.client [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 827.269356] env[65758]: WARNING openstack [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 827.269504] env[65758]: WARNING openstack [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 827.360645] env[65758]: DEBUG nova.network.neutron [-] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 827.485992] env[65758]: WARNING neutronclient.v2_0.client [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 827.486894] env[65758]: WARNING openstack [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 827.487420] env[65758]: WARNING openstack [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 827.539451] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc15ce-78d9-64ab-be64-5d225d6417ec, 'name': SearchDatastore_Task, 'duration_secs': 0.026794} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.539878] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.540267] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 827.540558] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.540730] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.541115] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.541533] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-447f4a8b-06b2-4a20-86cb-a138cd524599 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.564519] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.564899] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 827.565679] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51377fc1-8c4a-4172-9b5b-5b2ba60805b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.574764] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 827.574764] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52894026-c62e-045d-0346-995f6462b4e2" [ 827.574764] env[65758]: _type = "Task" [ 827.574764] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.586490] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52894026-c62e-045d-0346-995f6462b4e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.604581] env[65758]: DEBUG nova.network.neutron [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updated VIF entry in instance network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 827.604955] env[65758]: DEBUG nova.network.neutron [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updating instance_info_cache with network_info: [{"id": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "address": "fa:16:3e:f8:f1:bd", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap972faaf3-0e", "ovs_interfaceid": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 827.638236] env[65758]: DEBUG nova.scheduler.client.report [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed 
for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 827.692729] env[65758]: DEBUG oslo_vmware.api [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660481, 'name': PowerOffVM_Task, 'duration_secs': 0.929366} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.693024] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.693165] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.693539] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f32d840a-cd1a-433e-9357-8fd6a7d71c04 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.698247] env[65758]: DEBUG oslo_concurrency.lockutils [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Releasing lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.698520] env[65758]: DEBUG nova.compute.manager [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Received event network-changed-06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 827.698756] env[65758]: DEBUG nova.compute.manager [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Refreshing instance network info cache due to event network-changed-06d61806-4596-49d1-968d-8ba5ad0b379d. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 827.698890] env[65758]: DEBUG oslo_concurrency.lockutils [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Acquiring lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.699033] env[65758]: DEBUG oslo_concurrency.lockutils [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Acquired lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.699192] env[65758]: DEBUG nova.network.neutron [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Refreshing network info cache for port 06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 827.765014] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d5d1014-727d-403e-bf2b-c0241c197caf tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 72.168s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.782763] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 827.783160] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 827.783422] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Deleting the datastore file [datastore2] 596a5005-3607-44a2-9c0e-f1a56865011c {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 827.784194] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d1dd7b4-87df-437f-9ae8-08e3cf6e7d94 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.794326] env[65758]: DEBUG oslo_vmware.api [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 827.794326] env[65758]: value = "task-4660483" [ 827.794326] env[65758]: _type = "Task" [ 827.794326] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.806022] env[65758]: DEBUG oslo_vmware.api [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660483, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.863535] env[65758]: INFO nova.compute.manager [-] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Took 1.39 seconds to deallocate network for instance. [ 828.057929] env[65758]: INFO nova.compute.manager [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Rebuilding instance [ 828.095643] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52894026-c62e-045d-0346-995f6462b4e2, 'name': SearchDatastore_Task, 'duration_secs': 0.01188} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.097168] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14b4f8eb-42b7-4197-ba08-183943a0cba8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.104494] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 828.104494] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522e07e8-9386-825a-918e-a2f6601efd8c" [ 828.104494] env[65758]: _type = "Task" [ 828.104494] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.108252] env[65758]: DEBUG oslo_concurrency.lockutils [req-970090c2-c474-4fe0-9f92-06418f4fc32a req-dd0c7220-4425-4737-99f6-3b3d9cb76d93 service nova] Releasing lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.115412] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522e07e8-9386-825a-918e-a2f6601efd8c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.123075] env[65758]: DEBUG nova.compute.manager [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 828.123697] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbada6e9-9a3a-4f39-8048-862914825294 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.146433] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.889s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.146985] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 828.149661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.328s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.149848] env[65758]: DEBUG nova.objects.instance [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 828.202466] env[65758]: WARNING neutronclient.v2_0.client [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 828.203840] env[65758]: WARNING openstack [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 828.204581] env[65758]: WARNING openstack [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 828.269096] env[65758]: DEBUG nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 828.310051] env[65758]: DEBUG oslo_vmware.api [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660483, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16369} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.316364] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.316803] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.317383] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.317720] env[65758]: INFO nova.compute.manager [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Took 1.66 seconds to destroy the instance on the hypervisor. [ 828.318869] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 828.319436] env[65758]: DEBUG nova.compute.manager [-] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 828.319691] env[65758]: DEBUG nova.network.neutron [-] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 828.320191] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 828.322387] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 828.322387] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 828.375021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.394657] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 828.528314] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d17b25-70f3-5029-9bfb-d0fd42ea561c/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 828.529752] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86dc1186-e24f-43fa-9faa-215866b06b54 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.537831] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d17b25-70f3-5029-9bfb-d0fd42ea561c/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 828.538109] env[65758]: ERROR oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d17b25-70f3-5029-9bfb-d0fd42ea561c/disk-0.vmdk due to incomplete transfer. 
[ 828.538347] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2cf974ac-9213-417f-9167-caf39c0fcadb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.547627] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d17b25-70f3-5029-9bfb-d0fd42ea561c/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 828.547627] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Uploaded image 08f49cb1-96c5-42f1-91f7-58aef5d61a5b to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 828.550421] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 828.550799] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-46ef21dc-1aec-49cb-acfd-c3b7da616a13 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.558509] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 828.558509] env[65758]: value = "task-4660484" [ 828.558509] env[65758]: _type = "Task" [ 828.558509] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.571452] env[65758]: WARNING neutronclient.v2_0.client [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 828.572165] env[65758]: WARNING openstack [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 828.572616] env[65758]: WARNING openstack [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 828.584675] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660484, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.622370] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522e07e8-9386-825a-918e-a2f6601efd8c, 'name': SearchDatastore_Task, 'duration_secs': 0.011475} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.622370] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.622541] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] c1b9d81e-d747-4665-a083-26d8383f7645/c1b9d81e-d747-4665-a083-26d8383f7645.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 828.623376] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86ee6faa-2794-4ea2-a61d-ae6a33efc39f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.633801] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 828.633801] env[65758]: value = "task-4660485" [ 828.633801] env[65758]: _type = "Task" [ 828.633801] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.650573] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.663071] env[65758]: DEBUG nova.compute.utils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 828.665524] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 828.665524] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 828.665800] env[65758]: WARNING neutronclient.v2_0.client [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 828.666594] env[65758]: WARNING neutronclient.v2_0.client [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 828.667229] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 828.669392] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 828.745985] env[65758]: DEBUG nova.policy [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68c293475b1641bda0a7f5a13f1cdbe1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4154e353eb4142178244814f4ebd6167', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 828.803299] env[65758]: DEBUG nova.network.neutron [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Updated VIF entry in instance network info cache for port 06d61806-4596-49d1-968d-8ba5ad0b379d. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 828.803299] env[65758]: DEBUG nova.network.neutron [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Updating instance_info_cache with network_info: [{"id": "06d61806-4596-49d1-968d-8ba5ad0b379d", "address": "fa:16:3e:b5:ab:9e", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06d61806-45", "ovs_interfaceid": "06d61806-4596-49d1-968d-8ba5ad0b379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 828.808174] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.996272] env[65758]: DEBUG nova.compute.manager [req-38c8dd13-2ae9-4fd6-96d9-904778c48146 req-58c7173d-2810-4f0b-9d0e-1be443a1eec4 service nova] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Received event network-vif-deleted-fc47a856-bb57-45b8-986b-bc9bcf87abe6 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 829.074703] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660484, 'name': Destroy_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.147988] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 829.148490] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660485, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.148833] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcae1bab-150c-4d4a-ae67-37c020ab1892 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.161231] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 829.161231] env[65758]: value = "task-4660486" [ 829.161231] env[65758]: _type = "Task" [ 829.161231] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.167054] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf46a75e-28ed-4870-99b5-21c86a720ce3 tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.168970] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 38.657s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.181062] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 829.184802] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660486, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.208101] env[65758]: DEBUG nova.network.neutron [-] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 829.230895] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Successfully created port: 6309f110-000f-4e57-a80d-4966b9d936ef {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 829.305604] env[65758]: DEBUG oslo_concurrency.lockutils [req-e26fcbc2-f505-4594-ac45-6318a4f924e3 req-5ea4c591-6421-4ee4-8ac2-84295f6a7d46 service nova] Releasing lock "refresh_cache-596a5005-3607-44a2-9c0e-f1a56865011c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.427179] env[65758]: DEBUG nova.compute.manager [req-15c478af-e017-488e-a799-b42d2e4b9b09 req-3eb2376d-06d9-40c7-8432-26b7586b0d86 service nova] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Received event network-vif-deleted-06d61806-4596-49d1-968d-8ba5ad0b379d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 829.570914] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660484, 'name': Destroy_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.646234] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586412} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.646234] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] c1b9d81e-d747-4665-a083-26d8383f7645/c1b9d81e-d747-4665-a083-26d8383f7645.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 829.646234] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 829.646234] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4f3d5dc-dcb0-45dc-94ab-1634c8699b47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.654763] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 829.654763] env[65758]: value = "task-4660487" [ 829.654763] env[65758]: _type = "Task" [ 829.654763] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.666315] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.680433] env[65758]: INFO nova.compute.claims [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.686810] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660486, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.710800] env[65758]: INFO nova.compute.manager [-] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Took 1.39 seconds to deallocate network for instance. [ 830.071661] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660484, 'name': Destroy_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.169167] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070493} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.172575] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 830.173441] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fbbba2-f9be-48ba-b528-e64ca98c3d4f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.184105] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660486, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.196474] env[65758]: INFO nova.compute.resource_tracker [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating resource usage from migration 37e0bba4-7690-4c4c-9e66-0b8b93f50a0f [ 830.201354] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 830.212950] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] c1b9d81e-d747-4665-a083-26d8383f7645/c1b9d81e-d747-4665-a083-26d8383f7645.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 830.214084] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5aff9151-98e5-40cd-9c10-b4f3561321b4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.230758] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.238435] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 830.238435] env[65758]: value = "task-4660488" [ 830.238435] env[65758]: _type = "Task" [ 830.238435] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.249500] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 830.249741] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.249941] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 830.250086] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 
tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.250229] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 830.250390] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 830.251082] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.251082] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 830.251082] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 830.251082] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 830.251384] env[65758]: DEBUG nova.virt.hardware [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 830.252543] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1845b33c-439b-4768-a6bd-0580a60f7d43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.267124] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660488, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.272436] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c79f10-deb0-4393-b928-fc1cf5a5ac87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.572712] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660484, 'name': Destroy_Task, 'duration_secs': 1.571759} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.572981] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Destroyed the VM [ 830.573288] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 830.573623] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-374453d4-529a-432a-a3e1-2feb38d2e9db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.581168] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 830.581168] env[65758]: value = "task-4660489" [ 830.581168] env[65758]: _type = "Task" [ 830.581168] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.595604] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660489, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.683895] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660486, 'name': PowerOffVM_Task, 'duration_secs': 1.078136} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.684472] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.684777] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.685707] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a73719-74b4-4823-89e8-18f68c927ea9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.704389] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 830.704389] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9410fb4-a93c-4358-b172-6cde1e7ca0d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.754841] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 830.754841] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 830.755084] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Deleting the datastore file [datastore2] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.756292] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50339112-16a2-4da9-b6e0-e466967d45f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.763755] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660488, 'name': ReconfigVM_Task, 'duration_secs': 0.279645} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.765035] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Reconfigured VM instance instance-00000039 to attach disk [datastore2] c1b9d81e-d747-4665-a083-26d8383f7645/c1b9d81e-d747-4665-a083-26d8383f7645.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.765925] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-004bb9d3-89a4-423b-9b4b-2710627f71b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.772386] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 830.772386] env[65758]: value = "task-4660491" [ 830.772386] env[65758]: _type = "Task" [ 830.772386] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.777622] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 830.777622] env[65758]: value = "task-4660492" [ 830.777622] env[65758]: _type = "Task" [ 830.777622] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.785738] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660491, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.796954] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660492, 'name': Rename_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.807073] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Successfully updated port: 6309f110-000f-4e57-a80d-4966b9d936ef {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 830.834587] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99061564-c5e4-41cd-964c-4e9dfe5a6b9b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.847154] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552eb5c1-ad3a-4a7d-b0fe-8236156b5cf9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.885988] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e87cfc-e1b3-45a5-87e9-1ce2a2569cb0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.895482] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d6baaa-36b7-4acf-ae1f-4be48bc3ec73 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.911125] env[65758]: DEBUG nova.compute.provider_tree [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.095042] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660489, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.123514] env[65758]: DEBUG nova.compute.manager [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Received event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 831.123710] env[65758]: DEBUG nova.compute.manager [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing instance network info cache due to event network-changed-972faaf3-0ee4-4d20-a393-b48d940dbae2. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 831.123925] env[65758]: DEBUG oslo_concurrency.lockutils [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] Acquiring lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.124056] env[65758]: DEBUG oslo_concurrency.lockutils [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] Acquired lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.124219] env[65758]: DEBUG nova.network.neutron [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Refreshing network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 831.285756] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11663} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.286041] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 831.286240] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 831.286415] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 831.292194] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660492, 'name': Rename_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.311568] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "refresh_cache-b7323030-4573-4af5-a19a-212a140d642a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.311848] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "refresh_cache-b7323030-4573-4af5-a19a-212a140d642a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.312125] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 831.414217] env[65758]: DEBUG nova.scheduler.client.report [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.460752] env[65758]: DEBUG nova.compute.manager [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Received event network-vif-plugged-6309f110-000f-4e57-a80d-4966b9d936ef {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 831.460811] env[65758]: DEBUG oslo_concurrency.lockutils [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Acquiring lock "b7323030-4573-4af5-a19a-212a140d642a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.461171] env[65758]: DEBUG oslo_concurrency.lockutils [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Lock "b7323030-4573-4af5-a19a-212a140d642a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.461219] env[65758]: DEBUG oslo_concurrency.lockutils [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Lock "b7323030-4573-4af5-a19a-212a140d642a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
831.461349] env[65758]: DEBUG nova.compute.manager [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] No waiting events found dispatching network-vif-plugged-6309f110-000f-4e57-a80d-4966b9d936ef {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 831.461507] env[65758]: WARNING nova.compute.manager [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Received unexpected event network-vif-plugged-6309f110-000f-4e57-a80d-4966b9d936ef for instance with vm_state building and task_state spawning. [ 831.462253] env[65758]: DEBUG nova.compute.manager [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Received event network-changed-6309f110-000f-4e57-a80d-4966b9d936ef {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 831.462253] env[65758]: DEBUG nova.compute.manager [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Refreshing instance network info cache due to event network-changed-6309f110-000f-4e57-a80d-4966b9d936ef. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 831.462253] env[65758]: DEBUG oslo_concurrency.lockutils [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Acquiring lock "refresh_cache-b7323030-4573-4af5-a19a-212a140d642a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.594081] env[65758]: DEBUG oslo_vmware.api [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660489, 'name': RemoveSnapshot_Task, 'duration_secs': 0.52733} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.594323] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 831.594563] env[65758]: INFO nova.compute.manager [None req-f85676ef-1869-44e1-a449-34010a0ee720 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Took 17.22 seconds to snapshot the instance on the hypervisor. [ 831.627283] env[65758]: WARNING neutronclient.v2_0.client [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 831.628044] env[65758]: WARNING openstack [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 831.628331] env[65758]: WARNING openstack [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 831.789853] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660492, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.801593] env[65758]: WARNING neutronclient.v2_0.client [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 831.802264] env[65758]: WARNING openstack [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 831.802716] env[65758]: WARNING openstack [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 831.815150] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 831.815382] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 831.855819] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 831.902533] env[65758]: DEBUG nova.network.neutron [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updated VIF entry in instance network info cache for port 972faaf3-0ee4-4d20-a393-b48d940dbae2. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 831.902770] env[65758]: DEBUG nova.network.neutron [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updating instance_info_cache with network_info: [{"id": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "address": "fa:16:3e:f8:f1:bd", "network": {"id": "9309c768-973d-49e7-9bf3-20ef0ae70272", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-279647093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "60dcbdfe17cb46fa8dfc1b7690f28b1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap972faaf3-0e", "ovs_interfaceid": "972faaf3-0ee4-4d20-a393-b48d940dbae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 831.920728] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.751s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.920728] env[65758]: INFO nova.compute.manager [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Migrating [ 831.928396] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.183s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.930136] env[65758]: INFO nova.compute.claims [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.959946] env[65758]: WARNING neutronclient.v2_0.client [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 
tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 831.960643] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 831.961010] env[65758]: WARNING openstack [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 832.064320] env[65758]: DEBUG nova.network.neutron [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Updating instance_info_cache with network_info: [{"id": "6309f110-000f-4e57-a80d-4966b9d936ef", "address": "fa:16:3e:a8:38:c4", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6309f110-00", "ovs_interfaceid": "6309f110-000f-4e57-a80d-4966b9d936ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 832.289797] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660492, 'name': Rename_Task, 'duration_secs': 1.182519} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.290099] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 832.290361] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-977c2456-0467-43a0-9336-2dbb21c98d72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.301119] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 832.301119] env[65758]: value = "task-4660493" [ 832.301119] env[65758]: _type = "Task" [ 832.301119] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.310562] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660493, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.326906] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 832.326906] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 832.327178] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 832.327393] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 832.327543] env[65758]: DEBUG nova.virt.hardware 
[None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 832.327759] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 832.328029] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.328236] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 832.328480] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 832.328733] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 832.329187] env[65758]: DEBUG nova.virt.hardware [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 832.330089] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff36013-0bad-4e17-89b2-ce55853657f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.343703] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca69863-b611-44a5-8d01-39f3effc30ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.359743] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.366618] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 832.366618] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.366618] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fcc561d-1ca1-4207-a556-2ecefdd07f36 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.387970] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.387970] env[65758]: value = "task-4660494" [ 832.387970] env[65758]: _type = "Task" [ 832.387970] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.400380] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660494, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.406197] env[65758]: DEBUG oslo_concurrency.lockutils [req-72d66c95-bb5b-431d-baca-90edd3a9a1fc req-9a9c0106-8a29-45df-939e-cdddbc55e6db service nova] Releasing lock "refresh_cache-cca3e019-8e82-4473-8609-291703762a6e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.443812] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.443812] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.443812] env[65758]: DEBUG nova.network.neutron [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 832.567602] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "refresh_cache-b7323030-4573-4af5-a19a-212a140d642a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.568299] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Instance network_info: |[{"id": "6309f110-000f-4e57-a80d-4966b9d936ef", "address": "fa:16:3e:a8:38:c4", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6309f110-00", "ovs_interfaceid": "6309f110-000f-4e57-a80d-4966b9d936ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 832.569139] env[65758]: DEBUG oslo_concurrency.lockutils [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Acquired lock "refresh_cache-b7323030-4573-4af5-a19a-212a140d642a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.569139] env[65758]: DEBUG nova.network.neutron [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Refreshing network info cache for port 6309f110-000f-4e57-a80d-4966b9d936ef {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 832.570593] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:38:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '085fb0ff-9285-4f1d-a008-a14da4844357', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6309f110-000f-4e57-a80d-4966b9d936ef', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.579917] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 832.580796] env[65758]: WARNING neutronclient.v2_0.client [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 832.581627] env[65758]: WARNING openstack [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 832.582114] env[65758]: WARNING openstack [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 832.593371] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7323030-4573-4af5-a19a-212a140d642a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.594203] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78352017-aaf9-4719-9eaf-38b5f5c1f3db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.617982] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.617982] env[65758]: value = "task-4660495" [ 832.617982] env[65758]: _type = "Task" [ 832.617982] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.627924] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660495, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.792067] env[65758]: WARNING neutronclient.v2_0.client [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 832.792835] env[65758]: WARNING openstack [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 832.793205] env[65758]: WARNING openstack [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 832.811625] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660493, 'name': PowerOnVM_Task, 'duration_secs': 0.463018} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.811783] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 832.811995] env[65758]: INFO nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Took 10.20 seconds to spawn the instance on the hypervisor. [ 832.812191] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 832.813047] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ebfbce-c83d-4cc1-b394-ee5e63290732 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.898791] env[65758]: DEBUG nova.network.neutron [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Updated VIF entry in instance network info cache for port 6309f110-000f-4e57-a80d-4966b9d936ef. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 832.899195] env[65758]: DEBUG nova.network.neutron [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Updating instance_info_cache with network_info: [{"id": "6309f110-000f-4e57-a80d-4966b9d936ef", "address": "fa:16:3e:a8:38:c4", "network": {"id": "8971142a-75c4-4a77-860e-49bd5a72cac6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1888116654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4154e353eb4142178244814f4ebd6167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6309f110-00", "ovs_interfaceid": "6309f110-000f-4e57-a80d-4966b9d936ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 832.904408] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660494, 'name': CreateVM_Task, 'duration_secs': 0.295804} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.904613] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.905151] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.905363] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.905739] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 832.906295] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bad3c521-0827-410e-8d0e-6572b6a94323 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.912252] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 832.912252] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5252a9fd-0540-91aa-f49d-a94232348331" [ 832.912252] env[65758]: _type = "Task" [ 832.912252] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.923367] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5252a9fd-0540-91aa-f49d-a94232348331, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.946125] env[65758]: WARNING neutronclient.v2_0.client [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 832.946462] env[65758]: WARNING openstack [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 832.946683] env[65758]: WARNING openstack [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.118668] env[65758]: WARNING neutronclient.v2_0.client [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 833.119161] env[65758]: WARNING openstack [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 833.121302] env[65758]: WARNING openstack [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 833.141556] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660495, 'name': CreateVM_Task, 'duration_secs': 0.327294} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.141983] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7323030-4573-4af5-a19a-212a140d642a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.142638] env[65758]: WARNING neutronclient.v2_0.client [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 833.143150] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.215579] env[65758]: DEBUG nova.network.neutron [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance_info_cache with network_info: [{"id": "ea073371-1ad8-47ae-9cca-67a419a8e219", "address": "fa:16:3e:e4:10:d3", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea073371-1a", "ovs_interfaceid": "ea073371-1ad8-47ae-9cca-67a419a8e219", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 833.336389] env[65758]: INFO nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Took 53.84 seconds to build instance. [ 833.406052] env[65758]: DEBUG oslo_concurrency.lockutils [req-7785d82d-ac7a-4b9c-afc8-130615ff0de3 req-7483d82d-0dae-4b19-a80a-89fa89ccf8b3 service nova] Releasing lock "refresh_cache-b7323030-4573-4af5-a19a-212a140d642a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.426452] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5252a9fd-0540-91aa-f49d-a94232348331, 'name': SearchDatastore_Task, 'duration_secs': 0.011817} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.426736] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.426968] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.427219] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.427364] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.427544] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.427934] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.428116] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.428338] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f141f273-ef7a-41db-ace4-bf9525771b94 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.432272] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac8d4c25-40a0-4222-8889-6f2b20e0a721 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.438594] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 
tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 833.438594] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b91f88-1765-9d8b-1d44-a9379652015e" [ 833.438594] env[65758]: _type = "Task" [ 833.438594] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.444262] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.444262] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.444592] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cacd900-9d01-4d87-b076-9a650425fedf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.452971] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b91f88-1765-9d8b-1d44-a9379652015e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.457309] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 833.457309] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ffe409-8aa2-c425-ed56-6d8caf2a869a" [ 833.457309] env[65758]: _type = "Task" [ 833.457309] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.465813] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ffe409-8aa2-c425-ed56-6d8caf2a869a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.527882] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb220d6-97ea-46d3-bee1-42ceff6d4e5b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.536564] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502d4fe2-5b91-4628-a41d-8bd176133565 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.567941] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b977bd-7f82-4158-97ac-bc9fda0d3202 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.576356] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307d9ed2-267b-4b29-8823-ada9687683c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.591122] env[65758]: DEBUG nova.compute.provider_tree [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.719138] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.839016] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "c1b9d81e-d747-4665-a083-26d8383f7645" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.304s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.950761] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b91f88-1765-9d8b-1d44-a9379652015e, 'name': SearchDatastore_Task, 'duration_secs': 0.025058} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.951101] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.951344] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.951551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.967846] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ffe409-8aa2-c425-ed56-6d8caf2a869a, 'name': SearchDatastore_Task, 'duration_secs': 0.024108} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.968674] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4de55833-4d29-4600-966a-c056eacbac23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.979360] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 833.979360] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52008778-8e0c-4648-15f3-bba4cd0da5d2" [ 833.979360] env[65758]: _type = "Task" [ 833.979360] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.990579] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52008778-8e0c-4648-15f3-bba4cd0da5d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.094225] env[65758]: DEBUG nova.scheduler.client.report [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 834.341045] env[65758]: DEBUG nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 834.476106] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "a662eac8-07e2-47f1-a4dd-9abbe824817d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.476376] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "a662eac8-07e2-47f1-a4dd-9abbe824817d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.476712] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "a662eac8-07e2-47f1-a4dd-9abbe824817d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.476914] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "a662eac8-07e2-47f1-a4dd-9abbe824817d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.477098] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "a662eac8-07e2-47f1-a4dd-9abbe824817d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.479505] env[65758]: INFO nova.compute.manager [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Terminating instance [ 834.492759] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52008778-8e0c-4648-15f3-bba4cd0da5d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011955} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.493683] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.493948] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5/548edde0-9e42-4cd3-bdd3-3615ab9b7fc5.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.494254] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.494439] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.494665] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b370966-446e-4a96-b8ae-af45a3d39657 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.497453] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-172ff440-ddec-4b4b-aef4-4fff446e702e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.507592] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 834.507592] env[65758]: value = "task-4660496" [ 834.507592] env[65758]: _type = "Task" [ 834.507592] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.508818] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.508999] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.512830] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b6a4bcc-771c-457b-af46-e750e433dbb7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.519029] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 834.519029] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527634e3-ea8f-a298-7aca-71f5f9d395f5" [ 834.519029] env[65758]: _type = "Task" [ 834.519029] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.522276] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660496, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.531079] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527634e3-ea8f-a298-7aca-71f5f9d395f5, 'name': SearchDatastore_Task, 'duration_secs': 0.009335} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.532115] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adc067ad-b31f-41c7-b344-a757e353858e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.538030] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 834.538030] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d7ac7a-10db-5931-a89e-4b17a1a0d95d" [ 834.538030] env[65758]: _type = "Task" [ 834.538030] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.545849] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d7ac7a-10db-5931-a89e-4b17a1a0d95d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.599541] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.600123] env[65758]: DEBUG nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 834.602994] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 38.822s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.984368] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.987313] env[65758]: DEBUG nova.compute.manager [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 834.987569] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.988581] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3041092-9e63-4c19-b814-e66a868b4ab7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.998743] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.999112] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8ebed7e-36f7-4e9a-b3be-d597236de594 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.008466] env[65758]: DEBUG oslo_vmware.api [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 835.008466] env[65758]: value = "task-4660497" [ 835.008466] env[65758]: _type = "Task" [ 835.008466] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.028108] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660496, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.028405] env[65758]: DEBUG oslo_vmware.api [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660497, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.052280] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d7ac7a-10db-5931-a89e-4b17a1a0d95d, 'name': SearchDatastore_Task, 'duration_secs': 0.008744} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.052737] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.053180] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] b7323030-4573-4af5-a19a-212a140d642a/b7323030-4573-4af5-a19a-212a140d642a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.054023] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f763d482-283b-4a33-81f4-cc277cf9bb06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.064200] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 835.064200] env[65758]: value = "task-4660498" [ 835.064200] env[65758]: _type = "Task" [ 835.064200] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.074707] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660498, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.119150] env[65758]: DEBUG nova.compute.utils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 835.120802] env[65758]: DEBUG nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 835.122707] env[65758]: DEBUG nova.network.neutron [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 835.122707] env[65758]: WARNING neutronclient.v2_0.client [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 835.122707] env[65758]: WARNING neutronclient.v2_0.client [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 835.122707] env[65758]: WARNING openstack [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 835.122707] env[65758]: WARNING openstack [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 835.171455] env[65758]: DEBUG nova.policy [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '638c09153b9e4ec09f9fb5f87ba1e0ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '693b129cd84f4eee9971e7221e92c3e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 835.238637] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa669a7-4521-4c51-8443-8dbe0f48af27 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.261332] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance '105c53ce-e657-4a29-bc7f-96b4f885707a' progress to 0 {{(pid=65758) _update_instance_progress 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 835.518068] env[65758]: DEBUG nova.network.neutron [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Successfully created port: 216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 835.527428] env[65758]: DEBUG oslo_vmware.api [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660497, 'name': PowerOffVM_Task, 'duration_secs': 0.396689} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.530929] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.531185] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.531488] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526733} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.531707] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbe02e89-1e93-4b63-b4bf-878a5f95b93f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.533526] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5/548edde0-9e42-4cd3-bdd3-3615ab9b7fc5.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.533800] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.534011] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aeb28e28-8780-4516-bc9f-310efc6e695d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.545321] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 835.545321] env[65758]: value = "task-4660500" [ 835.545321] env[65758]: _type = "Task" [ 835.545321] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.564407] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.576786] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660498, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.622166] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Applying migration context for instance 105c53ce-e657-4a29-bc7f-96b4f885707a as it has an incoming, in-progress migration 37e0bba4-7690-4c4c-9e66-0b8b93f50a0f. 
Migration status is migrating {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 835.626285] env[65758]: INFO nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating resource usage from migration 37e0bba4-7690-4c4c-9e66-0b8b93f50a0f [ 835.626796] env[65758]: INFO nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating resource usage from migration cbce059b-48af-4be4-a4d3-19366314e65f [ 835.632755] env[65758]: DEBUG nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 835.641107] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.641447] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.641665] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Deleting the datastore file [datastore1] a662eac8-07e2-47f1-a4dd-9abbe824817d {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.642068] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5aa585d8-3cc4-4dcd-b137-eec62bb95295 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.650433] env[65758]: DEBUG oslo_vmware.api [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 835.650433] env[65758]: value = "task-4660501" [ 835.650433] env[65758]: _type = "Task" [ 835.650433] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.660914] env[65758]: DEBUG oslo_vmware.api [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.662137] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 37aadd44-79e8-4479-862f-265549c9d802 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.662281] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 2d787237-26e5-4519-9f6e-1d30b9d016cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.662429] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.662653] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 1e249ca9-a7a8-440f-832b-a8f5d84ada8b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 835.662809] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 83fa942b-a195-4bcb-9ed5-5bb6764220a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.662990] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 9e007d55-0a5c-4469-a546-9b18e188bea0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 835.663158] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ec1e2845-e73a-40ff-9b6c-1d8281859fba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.663272] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 974d06c1-2704-4a78-bbd7-f54335c4288e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.663382] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e6159a35-f073-4931-b0b0-832a88680356 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.663491] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance cca3e019-8e82-4473-8609-291703762a6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.663594] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance f15c6953-f76b-44eb-bd1b-c0d3adddc163 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.663696] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a662eac8-07e2-47f1-a4dd-9abbe824817d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.663799] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 47bb5b02-4f84-468e-ad46-2c1c96b65c97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.663930] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 5e54e7f4-3df1-4283-bee1-a7e475051a24 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 835.664089] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 56ff4122-a999-4caf-b805-0754a66d6bc7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 835.664213] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 81f961c3-ec8f-4281-be18-5d605fa73ecc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 835.664333] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance b7e2a3d9-7db3-40b3-98a5-c6e6e040a947 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.664427] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.664542] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 596a5005-3607-44a2-9c0e-f1a56865011c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 835.664707] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance d60aaa5c-913f-4550-a4d5-ab994048da9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.664827] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.664873] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance c1b9d81e-d747-4665-a083-26d8383f7645 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.664929] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance b7323030-4573-4af5-a19a-212a140d642a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.665041] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 835.769719] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 835.770920] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39cacfde-2d22-44a7-b32d-33e8b4c79b99 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.778751] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 835.778751] env[65758]: value = "task-4660502" [ 835.778751] env[65758]: _type = "Task" [ 835.778751] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.789140] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660502, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.058953] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096334} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.059265] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.060308] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aabbc42-a39a-4726-91f6-124f5046619a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.086263] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5/548edde0-9e42-4cd3-bdd3-3615ab9b7fc5.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.087016] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53fb8632-d650-45ea-a44f-55355663d2f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.106057] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660498, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718871} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.106799] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] b7323030-4573-4af5-a19a-212a140d642a/b7323030-4573-4af5-a19a-212a140d642a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.107466] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.107466] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dcfde7b1-3c15-48f7-b554-35ebf5a3239e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.112842] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 836.112842] env[65758]: value = "task-4660503" [ 836.112842] env[65758]: _type = "Task" [ 836.112842] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.117618] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 836.117618] env[65758]: value = "task-4660504" [ 836.117618] env[65758]: _type = "Task" [ 836.117618] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.125240] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660503, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.131734] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.160879] env[65758]: DEBUG oslo_vmware.api [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.406261} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.161242] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.161439] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.161620] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.161808] env[65758]: INFO nova.compute.manager [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Took 1.17 seconds to destroy the instance on the hypervisor. [ 836.162110] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 836.162298] env[65758]: DEBUG nova.compute.manager [-] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 836.162453] env[65758]: DEBUG nova.network.neutron [-] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 836.162692] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 836.164045] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 836.164045] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 836.170900] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance d42d0818-1486-4696-9871-2cf989aeb885 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 836.230649] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 836.290042] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660502, 'name': PowerOffVM_Task, 'duration_secs': 0.275299} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.290342] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.290790] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance '105c53ce-e657-4a29-bc7f-96b4f885707a' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 836.625943] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660503, 'name': ReconfigVM_Task, 'duration_secs': 0.294062} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.626674] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5/548edde0-9e42-4cd3-bdd3-3615ab9b7fc5.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.627352] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7287483-3abe-4816-9e4c-b4c13e6579fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.633043] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071233} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.633043] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.634097] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb33777-26e7-4ab1-9af7-8425b0383256 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.638507] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 836.638507] env[65758]: value = "task-4660505" [ 836.638507] env[65758]: _type = "Task" [ 836.638507] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.652619] env[65758]: DEBUG nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 836.662440] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] b7323030-4573-4af5-a19a-212a140d642a/b7323030-4573-4af5-a19a-212a140d642a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.663530] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-799e1b25-d2ca-40b3-9f1a-e21551674304 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.682477] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 85082b72-89dd-47b7-b8ad-f2ad5ad0638d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 836.684554] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660505, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.695581] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 836.695835] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 836.696039] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 836.696221] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 
tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 836.696364] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 836.696510] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 836.696715] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.696959] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 836.697114] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 836.697244] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 836.697419] env[65758]: DEBUG nova.virt.hardware [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 836.698815] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66c2454-652c-4d47-be5e-c52e6bdc85ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.704337] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 836.704337] env[65758]: value = "task-4660506" [ 836.704337] env[65758]: _type = "Task" [ 836.704337] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.718174] env[65758]: DEBUG nova.compute.manager [req-b7ef1460-9b7d-4211-95ef-6c165de0ee85 req-58bc93b4-ce4d-430d-8754-6fd8b2930421 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Received event network-vif-deleted-31fb2cea-c496-4afb-99ad-ed2c4eb852bc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 836.718174] env[65758]: INFO nova.compute.manager [req-b7ef1460-9b7d-4211-95ef-6c165de0ee85 req-58bc93b4-ce4d-430d-8754-6fd8b2930421 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Neutron deleted interface 31fb2cea-c496-4afb-99ad-ed2c4eb852bc; detaching it from the instance and deleting it from the info cache [ 836.718174] env[65758]: DEBUG nova.network.neutron [req-b7ef1460-9b7d-4211-95ef-6c165de0ee85 req-58bc93b4-ce4d-430d-8754-6fd8b2930421 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 836.721460] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3d0f74-7d7d-45d7-8720-163c56be9aa1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.731441] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.798812] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 836.799211] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 836.799775] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 836.799775] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 836.800066] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 836.800361] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 836.800714] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.800950] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 836.801217] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 836.801454] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 836.801704] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 836.812122] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2bea8c7-1019-492d-a216-566004ebe839 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.837065] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 836.837065] env[65758]: value = "task-4660507" [ 836.837065] env[65758]: _type = "Task" [ 836.837065] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.846706] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660507, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.950891] env[65758]: DEBUG nova.network.neutron [-] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 837.105981] env[65758]: DEBUG nova.compute.manager [req-d91a119b-4a79-4520-8d32-c372fdfb6f0f req-321b2c3b-355a-4b12-9f55-d09e3efdec2f service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Received event network-vif-plugged-216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 837.106293] env[65758]: DEBUG oslo_concurrency.lockutils [req-d91a119b-4a79-4520-8d32-c372fdfb6f0f req-321b2c3b-355a-4b12-9f55-d09e3efdec2f service nova] Acquiring lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.106601] env[65758]: DEBUG oslo_concurrency.lockutils [req-d91a119b-4a79-4520-8d32-c372fdfb6f0f req-321b2c3b-355a-4b12-9f55-d09e3efdec2f service nova] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.106844] env[65758]: DEBUG oslo_concurrency.lockutils [req-d91a119b-4a79-4520-8d32-c372fdfb6f0f req-321b2c3b-355a-4b12-9f55-d09e3efdec2f service nova] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.107083] env[65758]: DEBUG nova.compute.manager [req-d91a119b-4a79-4520-8d32-c372fdfb6f0f req-321b2c3b-355a-4b12-9f55-d09e3efdec2f service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] No waiting events found dispatching network-vif-plugged-216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 837.107317] env[65758]: WARNING nova.compute.manager [req-d91a119b-4a79-4520-8d32-c372fdfb6f0f req-321b2c3b-355a-4b12-9f55-d09e3efdec2f service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Received unexpected event network-vif-plugged-216bffab-4451-407d-b8dd-9e8687a90b81 for instance with vm_state building and task_state spawning. [ 837.136171] env[65758]: DEBUG nova.network.neutron [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Successfully updated port: 216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 837.152483] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660505, 'name': Rename_Task, 'duration_secs': 0.150327} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.152818] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.153409] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a69fa5e-61ca-4e24-92bc-3299d502e667 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.162638] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 837.162638] env[65758]: value = "task-4660508" [ 837.162638] env[65758]: _type = "Task" [ 837.162638] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.172570] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660508, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.188952] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 96103549-80a5-462d-9f73-f5f6363ab9fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 837.216405] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660506, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.231809] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10c3ae54-f0f9-4dcb-b14e-bb2ebd9623d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.242119] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ac5fc6-8a8d-49bf-96c2-a3c3236d154d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.280474] env[65758]: DEBUG nova.compute.manager [req-b7ef1460-9b7d-4211-95ef-6c165de0ee85 req-58bc93b4-ce4d-430d-8754-6fd8b2930421 service nova] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Detach interface failed, port_id=31fb2cea-c496-4afb-99ad-ed2c4eb852bc, reason: Instance a662eac8-07e2-47f1-a4dd-9abbe824817d could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 837.348379] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660507, 'name': ReconfigVM_Task, 'duration_secs': 0.288017} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.348988] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance '105c53ce-e657-4a29-bc7f-96b4f885707a' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 837.453170] env[65758]: INFO nova.compute.manager [-] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Took 1.29 seconds to deallocate network for instance. [ 837.640052] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.640052] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.640052] env[65758]: DEBUG nova.network.neutron [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 837.674140] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660508, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.692665] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 7f5911fb-785e-444c-9408-c6884e06c5d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 837.692665] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Migration 37e0bba4-7690-4c4c-9e66-0b8b93f50a0f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 837.692861] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 105c53ce-e657-4a29-bc7f-96b4f885707a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 837.719544] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660506, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.856269] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:16:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='346c523a-8d39-4f4e-a2d8-eb4e1ab4f9a4',id=28,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1141065059',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 837.856540] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 837.856737] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 837.856971] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 837.857175] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 837.857363] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 837.857612] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 
tempest-MigrationsAdminTest-1268559466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.857814] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 837.858110] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 837.858255] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 837.858469] env[65758]: DEBUG nova.virt.hardware [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 837.864045] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 837.864398] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bc41e43-4ceb-4fe5-9806-a1e6d91bbc16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.883742] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 837.883742] env[65758]: value = "task-4660509" [ 837.883742] env[65758]: _type = "Task" [ 837.883742] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.892302] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660509, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.960530] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.145028] env[65758]: WARNING openstack [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 838.146605] env[65758]: WARNING openstack [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 838.174317] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660508, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.182164] env[65758]: DEBUG nova.network.neutron [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 838.196254] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 838.217690] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660506, 'name': ReconfigVM_Task, 'duration_secs': 1.303366} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.218009] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Reconfigured VM instance instance-0000003a to attach disk [datastore1] b7323030-4573-4af5-a19a-212a140d642a/b7323030-4573-4af5-a19a-212a140d642a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.218701] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a189b82b-a995-4727-a531-7fbc1000e168 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.226931] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 838.226931] env[65758]: value = "task-4660510" [ 838.226931] env[65758]: _type = "Task" [ 838.226931] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.240390] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660510, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.263204] env[65758]: WARNING neutronclient.v2_0.client [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 838.264117] env[65758]: WARNING openstack [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 838.264620] env[65758]: WARNING openstack [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 838.365612] env[65758]: DEBUG nova.network.neutron [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [{"id": "216bffab-4451-407d-b8dd-9e8687a90b81", "address": "fa:16:3e:74:3c:81", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216bffab-44", "ovs_interfaceid": "216bffab-4451-407d-b8dd-9e8687a90b81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 838.396655] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660509, 'name': ReconfigVM_Task, 'duration_secs': 0.170024} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.397064] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 838.397946] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30eef91e-dbbc-4386-8971-8424d2fe043b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.423769] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a/105c53ce-e657-4a29-bc7f-96b4f885707a.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 838.424102] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-412626da-12f4-45b8-ace6-be76bf0bf558 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.443557] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 838.443557] env[65758]: value = "task-4660511" [ 838.443557] env[65758]: _type = "Task" [ 838.443557] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.452448] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660511, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.675320] env[65758]: DEBUG oslo_vmware.api [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660508, 'name': PowerOnVM_Task, 'duration_secs': 1.238455} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.675704] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.675923] env[65758]: DEBUG nova.compute.manager [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 838.676766] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d534b56-aaf0-4085-9332-d13b234ffd16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.701146] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 454bd092-f683-4a3a-91c9-65191d6996f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 838.740616] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660510, 'name': Rename_Task, 'duration_secs': 0.177914} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.741124] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.741268] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fee8bcc7-ca28-4b8f-9faf-8433bfba0956 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.749629] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 838.749629] env[65758]: value = "task-4660512" [ 838.749629] env[65758]: _type = "Task" [ 838.749629] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.759967] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660512, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.867888] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.868317] env[65758]: DEBUG nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Instance network_info: |[{"id": "216bffab-4451-407d-b8dd-9e8687a90b81", "address": "fa:16:3e:74:3c:81", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216bffab-44", "ovs_interfaceid": "216bffab-4451-407d-b8dd-9e8687a90b81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 838.868799] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:3c:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd77ecbc-aaaf-45f4-ae8f-977d90e4052f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '216bffab-4451-407d-b8dd-9e8687a90b81', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.876739] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 838.876970] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.877229] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f25485f7-b233-401c-a420-573fa9c0901b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.902153] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.902153] env[65758]: value = "task-4660513" [ 838.902153] env[65758]: _type = "Task" [ 838.902153] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.911767] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660513, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.954212] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660511, 'name': ReconfigVM_Task, 'duration_secs': 0.267463} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.954374] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a/105c53ce-e657-4a29-bc7f-96b4f885707a.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.954589] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance '105c53ce-e657-4a29-bc7f-96b4f885707a' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 839.195309] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.204440] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 31816c0c-d7d2-48db-9a87-a1e03c938a60 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 839.209182] env[65758]: DEBUG nova.compute.manager [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Received event network-changed-216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 839.210746] env[65758]: DEBUG nova.compute.manager [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Refreshing instance network info cache due to event network-changed-216bffab-4451-407d-b8dd-9e8687a90b81. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 839.210746] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] Acquiring lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.210746] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] Acquired lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.210746] env[65758]: DEBUG nova.network.neutron [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Refreshing network info cache for port 216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 839.262252] env[65758]: DEBUG oslo_vmware.api [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660512, 'name': PowerOnVM_Task, 'duration_secs': 0.501862} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.262422] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.262631] env[65758]: INFO nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Took 9.06 seconds to spawn the instance on the hypervisor. 
[ 839.262806] env[65758]: DEBUG nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 839.263656] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a4fa5b-5885-46c5-bc53-7c4c2fd04313 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.413393] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660513, 'name': CreateVM_Task, 'duration_secs': 0.399809} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.413393] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.414722] env[65758]: WARNING neutronclient.v2_0.client [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 839.414722] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.414722] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.414977] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 839.415107] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f0d8475-6348-4056-baff-24bfa78749ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.420829] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 839.420829] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52cb6bc9-9199-2fb7-3201-caefcdd8aaf3" [ 839.420829] env[65758]: _type = "Task" [ 839.420829] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.430032] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52cb6bc9-9199-2fb7-3201-caefcdd8aaf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.463244] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af90a146-5ca6-4153-9f52-faf67f446ca8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.483403] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134ca893-ffb4-479d-a13c-131db9985839 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.507178] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance '105c53ce-e657-4a29-bc7f-96b4f885707a' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 839.715700] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 839.715700] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Migration cbce059b-48af-4be4-a4d3-19366314e65f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 839.715700] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a9ec9a64-94c7-41a5-a7a4-5e034ddfc592 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 839.717989] env[65758]: WARNING neutronclient.v2_0.client [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 839.718121] env[65758]: WARNING openstack [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 839.718631] env[65758]: WARNING openstack [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 839.781932] env[65758]: INFO nova.compute.manager [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Took 54.36 seconds to build instance. [ 839.891288] env[65758]: WARNING neutronclient.v2_0.client [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 839.891941] env[65758]: WARNING openstack [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 839.892294] env[65758]: WARNING openstack [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 839.932561] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52cb6bc9-9199-2fb7-3201-caefcdd8aaf3, 'name': SearchDatastore_Task, 'duration_secs': 0.0256} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.933104] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.933405] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.933553] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.933710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.933869] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.934169] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d340bec-9c02-424c-8b52-d2a5a3665d49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.945013] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.945192] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.945982] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15744db0-52ad-4efc-b51c-e6f647280aa8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.957474] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 839.957474] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52791f03-05c8-8241-02bf-4b057c37ecb4" [ 839.957474] env[65758]: _type = "Task" [ 839.957474] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.966468] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52791f03-05c8-8241-02bf-4b057c37ecb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.991672] env[65758]: DEBUG nova.network.neutron [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updated VIF entry in instance network info cache for port 216bffab-4451-407d-b8dd-9e8687a90b81. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 839.991672] env[65758]: DEBUG nova.network.neutron [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [{"id": "216bffab-4451-407d-b8dd-9e8687a90b81", "address": "fa:16:3e:74:3c:81", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216bffab-44", "ovs_interfaceid": "216bffab-4451-407d-b8dd-9e8687a90b81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 840.014099] env[65758]: WARNING neutronclient.v2_0.client [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, 
please use that as this will be removed in a future release. [ 840.014471] env[65758]: WARNING neutronclient.v2_0.client [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 840.059131] env[65758]: DEBUG nova.network.neutron [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Port ea073371-1ad8-47ae-9cca-67a419a8e219 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 840.128970] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.129323] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.129584] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.129826] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.130011] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.133062] env[65758]: INFO nova.compute.manager [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Terminating instance [ 840.228395] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 79c63944-c4c8-4c7c-bc42-3f958d737e66 has been scheduled to this compute host, 
the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 840.284462] env[65758]: DEBUG oslo_concurrency.lockutils [None req-282ad1f5-6408-44e5-b9bb-922b3aa33fa7 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7323030-4573-4af5-a19a-212a140d642a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.690s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.306624] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "c1b9d81e-d747-4665-a083-26d8383f7645" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.306951] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "c1b9d81e-d747-4665-a083-26d8383f7645" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.307261] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "c1b9d81e-d747-4665-a083-26d8383f7645-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.307511] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "c1b9d81e-d747-4665-a083-26d8383f7645-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.307720] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "c1b9d81e-d747-4665-a083-26d8383f7645-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.309930] env[65758]: INFO nova.compute.manager [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Terminating instance [ 840.407348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] 
Acquiring lock "b7323030-4573-4af5-a19a-212a140d642a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.408034] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7323030-4573-4af5-a19a-212a140d642a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.408034] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "b7323030-4573-4af5-a19a-212a140d642a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.408034] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7323030-4573-4af5-a19a-212a140d642a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.408240] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7323030-4573-4af5-a19a-212a140d642a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.410321] env[65758]: INFO nova.compute.manager [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Terminating instance [ 840.470216] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52791f03-05c8-8241-02bf-4b057c37ecb4, 'name': SearchDatastore_Task, 'duration_secs': 0.027046} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.471140] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-362b8889-fd83-485f-89ca-0fbbdf3524db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.478397] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 840.478397] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5209b02f-358f-fbac-67dd-4d8f789f3b22" [ 840.478397] env[65758]: _type = "Task" [ 840.478397] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.487314] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5209b02f-358f-fbac-67dd-4d8f789f3b22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.493957] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8bb66ca-52ce-4a90-b93f-189724f04d59 req-c129c21d-db7a-47ed-9439-ad1c9c444360 service nova] Releasing lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.635817] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "refresh_cache-548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.636050] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired lock "refresh_cache-548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.636257] env[65758]: DEBUG nova.network.neutron [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 840.732185] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance df46c28d-7cbd-490e-8db2-9730e4d9f953 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 840.787807] env[65758]: DEBUG nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 840.815571] env[65758]: DEBUG nova.compute.manager [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 840.815571] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 840.816067] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b6b361-2977-4297-ac42-a79d1bd233f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.825687] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 840.826732] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-204ddee1-08c1-4548-94e2-1d7cdeeb5ccd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.835511] env[65758]: DEBUG oslo_vmware.api [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 840.835511] env[65758]: value = "task-4660514" [ 840.835511] env[65758]: _type = "Task" [ 840.835511] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.847034] env[65758]: DEBUG oslo_vmware.api [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.914828] env[65758]: DEBUG nova.compute.manager [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 840.914828] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 840.915542] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c21f64-3051-4e1c-b154-0b3cfd67b9a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.924612] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 840.924893] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4181e19-d171-4a75-80f1-16dbac98ef49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.932958] env[65758]: DEBUG oslo_vmware.api [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 840.932958] env[65758]: value = "task-4660515" [ 840.932958] env[65758]: _type = "Task" [ 840.932958] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.944150] env[65758]: DEBUG oslo_vmware.api [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660515, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.991517] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5209b02f-358f-fbac-67dd-4d8f789f3b22, 'name': SearchDatastore_Task, 'duration_secs': 0.017219} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.992045] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.992567] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49/4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.993748] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47392cc4-b84b-4c58-bc45-19f134ff57dc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.004054] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 841.004054] env[65758]: value = "task-4660516" [ 841.004054] env[65758]: _type = "Task" [ 841.004054] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.015604] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660516, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.085388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.085662] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.085839] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.139540] env[65758]: WARNING neutronclient.v2_0.client [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 841.140407] env[65758]: WARNING openstack [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 841.140710] env[65758]: WARNING openstack [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 841.167677] env[65758]: DEBUG nova.network.neutron [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 841.232626] env[65758]: DEBUG nova.network.neutron [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 841.234825] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e93528eb-33d0-46d1-94e8-d1d66f2c682f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 841.315307] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.346959] env[65758]: DEBUG oslo_vmware.api [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660514, 'name': PowerOffVM_Task, 'duration_secs': 0.235966} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.347363] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 841.347498] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 841.347805] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8114adf4-8671-4a2b-99e8-323bf3fb531e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.426154] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 841.426585] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 841.427340] env[65758]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleting the datastore file [datastore2] c1b9d81e-d747-4665-a083-26d8383f7645 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 841.427439] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0002f82f-3612-4423-af56-d6e98201ee51 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.438868] env[65758]: DEBUG oslo_vmware.api [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 841.438868] env[65758]: value = "task-4660518" [ 841.438868] env[65758]: _type = "Task" [ 841.438868] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.446615] env[65758]: DEBUG oslo_vmware.api [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660515, 'name': PowerOffVM_Task, 'duration_secs': 0.223665} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.447365] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 841.447697] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 841.447835] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea2d0f37-4228-4644-8391-498a18bad6f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.456032] env[65758]: DEBUG oslo_vmware.api [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.517165] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660516, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.531030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 841.531030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 841.531030] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleting the datastore file [datastore1] b7323030-4573-4af5-a19a-212a140d642a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 841.531807] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f28abc43-8fad-4a18-a223-dd592de911f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.541093] env[65758]: DEBUG oslo_vmware.api [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for the task: (returnval){ [ 841.541093] env[65758]: value = "task-4660520" [ 841.541093] env[65758]: _type = "Task" [ 841.541093] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.550213] env[65758]: DEBUG oslo_vmware.api [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.735786] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Releasing lock "refresh_cache-548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.736287] env[65758]: DEBUG nova.compute.manager [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 841.736482] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 841.737214] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ba3153f2-8e6f-469c-8730-957c5eebe97b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 841.739061] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b15f58-0e3b-4997-9d4c-17c872ad30d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.747901] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.748367] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1156cf4c-74f9-41c9-b9dc-becb76a8f927 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.756785] env[65758]: DEBUG oslo_vmware.api [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 841.756785] env[65758]: value = "task-4660521" [ 841.756785] env[65758]: _type = "Task" [ 841.756785] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.766246] env[65758]: DEBUG oslo_vmware.api [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660521, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.950237] env[65758]: DEBUG oslo_vmware.api [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239815} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.951154] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 841.951154] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 841.951324] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 841.951528] env[65758]: INFO nova.compute.manager [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Took 1.14 seconds to destroy the instance on the hypervisor. [ 841.951900] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 841.952168] env[65758]: DEBUG nova.compute.manager [-] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 841.952270] env[65758]: DEBUG nova.network.neutron [-] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 841.952628] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 841.953235] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 841.953560] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 842.015272] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660516, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543824} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.015569] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49/4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.015781] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.016057] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc9e93a9-2e9e-4b82-84e6-6175ac6049d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.025233] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 842.025233] env[65758]: value = "task-4660522" [ 842.025233] env[65758]: _type = "Task" [ 842.025233] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.045442] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660522, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.055357] env[65758]: DEBUG oslo_vmware.api [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Task: {'id': task-4660520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181821} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.055566] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 842.055775] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 842.056000] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 842.056222] env[65758]: INFO nova.compute.manager [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] [instance: b7323030-4573-4af5-a19a-212a140d642a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 842.056519] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 842.056774] env[65758]: DEBUG nova.compute.manager [-] [instance: b7323030-4573-4af5-a19a-212a140d642a] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 842.056869] env[65758]: DEBUG nova.network.neutron [-] [instance: b7323030-4573-4af5-a19a-212a140d642a] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 842.057177] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 842.057717] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 842.059031] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 842.084120] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 842.095092] env[65758]: WARNING neutronclient.v2_0.client [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 842.105842] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 842.215651] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.215841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.216030] env[65758]: DEBUG nova.network.neutron [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 842.244153] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 875cbc88-f817-4ea8-a969-b97e875918d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 842.244520] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 842.244696] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4672MB phys_disk=100GB used_disk=21GB total_vcpus=48 used_vcpus=21 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '20', 'num_vm_active': '17', 'num_task_None': '15', 'num_os_type_None': '20', 'num_proj_cdaabf2897064b5a948dbdb6d5921d76': '2', 'io_workload': '4', 'num_proj_e114eef3998848699a9a086fee86db29': '1', 'num_proj_9aaf5b39abda42f28a847d5fe0d0ecec': '2', 'num_proj_693b129cd84f4eee9971e7221e92c3e0': '3', 'num_vm_rescued': '1', 'num_proj_c4c2ab2b80c04c38bfb4c7cafac87fe6': '1', 'num_proj_60dcbdfe17cb46fa8dfc1b7690f28b1f': '1', 'num_proj_0a4045fe12c0401fbb68bff8def4e9ea': '2', 'num_task_deleting': '1', 'num_proj_296e50c9805843949e592a0ab985d3a3': '1', 'num_task_resize_migrating': '1', 'num_task_resize_prep': '1', 'num_proj_fd550f85853f447bb91a89b6bc6c5720': '1', 'num_proj_6192e0af007d495c85f98e1a72ab56eb': '1', 'num_proj_111dc87614bb42e2bc66ae1bfb092795': '1', 'num_proj_4ccf42aff94443239bf03f0aad58567f': '2', 'num_task_rebuild_spawning': '1', 'num_proj_4154e353eb4142178244814f4ebd6167': '2', 'num_vm_building': '2', 'num_task_spawning': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 842.269433] env[65758]: DEBUG oslo_vmware.api [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660521, 'name': PowerOffVM_Task, 'duration_secs': 0.210733} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.269748] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.269906] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 842.270220] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3517607f-ddd6-4001-ac90-bd7ac7061f70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.299269] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 842.299503] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 842.299681] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Deleting the datastore file [datastore1] 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 842.300202] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe284e79-cdf7-47cb-9590-7a9214ab50ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.312047] env[65758]: DEBUG oslo_vmware.api [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 842.312047] env[65758]: value = "task-4660524" [ 842.312047] env[65758]: _type = "Task" [ 842.312047] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.320299] env[65758]: DEBUG oslo_vmware.api [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.537052] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660522, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076297} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.537052] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.537944] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b2d068-703a-4379-908d-8818d8f90ec9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.562451] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49/4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.562838] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26445130-6a16-4de2-8f02-ca6deccd3e99 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.585879] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 842.585879] env[65758]: value = "task-4660525" [ 842.585879] env[65758]: _type = "Task" [ 842.585879] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.594861] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660525, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.631676] env[65758]: DEBUG nova.compute.manager [req-384c4e88-6650-4cb6-880d-2b4ebac9dda2 req-13132906-109f-4ca1-b92e-975819345667 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Received event network-vif-deleted-390c38a1-9300-466c-ab54-f85316e00181 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 842.631676] env[65758]: INFO nova.compute.manager [req-384c4e88-6650-4cb6-880d-2b4ebac9dda2 req-13132906-109f-4ca1-b92e-975819345667 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Neutron deleted interface 390c38a1-9300-466c-ab54-f85316e00181; detaching it from the instance and deleting it from the info cache [ 842.631676] env[65758]: DEBUG nova.network.neutron [req-384c4e88-6650-4cb6-880d-2b4ebac9dda2 req-13132906-109f-4ca1-b92e-975819345667 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 842.719886] env[65758]: DEBUG nova.compute.manager [req-e129fd66-8975-4246-937b-ef68df6a387a req-79f028e7-98b2-45cb-b698-649f434a8b4e service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Received event network-vif-deleted-6309f110-000f-4e57-a80d-4966b9d936ef {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 842.720160] env[65758]: INFO nova.compute.manager [req-e129fd66-8975-4246-937b-ef68df6a387a req-79f028e7-98b2-45cb-b698-649f434a8b4e service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Neutron deleted interface 6309f110-000f-4e57-a80d-4966b9d936ef; detaching it from the instance and deleting it from the info cache [ 842.720358] env[65758]: DEBUG nova.network.neutron [req-e129fd66-8975-4246-937b-ef68df6a387a req-79f028e7-98b2-45cb-b698-649f434a8b4e service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 842.721812] env[65758]: WARNING neutronclient.v2_0.client [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 842.722640] env[65758]: WARNING openstack [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 842.722897] env[65758]: WARNING openstack [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 842.818875] env[65758]: DEBUG oslo_vmware.api [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190074} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.822064] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 842.822156] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 842.822337] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 842.822650] env[65758]: INFO nova.compute.manager [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Took 1.09 seconds to destroy the instance on the hypervisor. [ 842.823120] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 842.823603] env[65758]: DEBUG nova.compute.manager [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 842.823690] env[65758]: DEBUG nova.network.neutron [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 842.823936] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 842.824623] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 842.824744] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 842.842767] env[65758]: DEBUG nova.network.neutron [-] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 842.863283] env[65758]: DEBUG nova.network.neutron [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 842.864097] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 842.931085] env[65758]: WARNING neutronclient.v2_0.client [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 842.932128] env[65758]: WARNING openstack [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 842.932802] env[65758]: WARNING openstack [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 842.956262] env[65758]: DEBUG nova.network.neutron [-] [instance: b7323030-4573-4af5-a19a-212a140d642a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 843.039539] env[65758]: DEBUG nova.network.neutron [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance_info_cache with network_info: [{"id": "ea073371-1ad8-47ae-9cca-67a419a8e219", "address": "fa:16:3e:e4:10:d3", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea073371-1a", "ovs_interfaceid": "ea073371-1ad8-47ae-9cca-67a419a8e219", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 843.048644] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e6247f-f086-4069-9ef4-a15a34002a8f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.060891] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89db0b5-c16c-48e9-b032-47c0ddd59651 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.101116] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0b9196-b16f-4b78-86b7-7c17084d2737 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.109819] env[65758]: DEBUG 
oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660525, 'name': ReconfigVM_Task, 'duration_secs': 0.344693} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.112155] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49/4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.112877] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-121a1fbc-4856-452e-ba72-2eee7dbfaae3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.115491] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba58a7e-ea6b-4e43-a2b1-3dbac9bf995e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.133063] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.138529] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 843.138529] env[65758]: value = "task-4660526" [ 843.138529] env[65758]: _type = "Task" [ 843.138529] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.138529] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0844569b-470a-4fa1-9ef0-cb1262390833 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.149303] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660526, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.153126] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cae66b4-e3b4-45ee-81b9-3d4728fb4489 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.191085] env[65758]: DEBUG nova.compute.manager [req-384c4e88-6650-4cb6-880d-2b4ebac9dda2 req-13132906-109f-4ca1-b92e-975819345667 service nova] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Detach interface failed, port_id=390c38a1-9300-466c-ab54-f85316e00181, reason: Instance c1b9d81e-d747-4665-a083-26d8383f7645 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 843.234608] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b74efcc9-6443-499b-b380-031f6e3132dd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.245031] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e78427-d340-4633-80b6-44c2ab0eab5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.281198] env[65758]: DEBUG nova.compute.manager [req-e129fd66-8975-4246-937b-ef68df6a387a req-79f028e7-98b2-45cb-b698-649f434a8b4e service nova] [instance: b7323030-4573-4af5-a19a-212a140d642a] Detach interface failed, port_id=6309f110-000f-4e57-a80d-4966b9d936ef, reason: Instance b7323030-4573-4af5-a19a-212a140d642a could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 843.345352] env[65758]: INFO nova.compute.manager [-] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Took 1.39 seconds to deallocate network for instance. [ 843.366223] env[65758]: DEBUG nova.network.neutron [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 843.458914] env[65758]: INFO nova.compute.manager [-] [instance: b7323030-4573-4af5-a19a-212a140d642a] Took 1.40 seconds to deallocate network for instance. [ 843.542312] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.637830] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 843.655488] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660526, 'name': Rename_Task, 'duration_secs': 0.501476} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.655792] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.656035] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-951c7d4b-2959-495e-851c-8572529ff4a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.666593] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 843.666593] env[65758]: value = "task-4660527" [ 843.666593] env[65758]: _type = "Task" [ 843.666593] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.677524] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660527, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.855942] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.870453] env[65758]: INFO nova.compute.manager [-] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Took 1.05 seconds to deallocate network for instance. 
[ 843.968026] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.076660] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa55d1e1-b346-4fab-ba84-22f9b13fdcbf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.100429] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a09509e-2cec-4aaa-8198-90811a9798ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.111566] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance '105c53ce-e657-4a29-bc7f-96b4f885707a' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 844.149624] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 844.149911] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.547s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.150213] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.248s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.150422] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.152689] env[65758]: DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.972s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.155070] env[65758]: INFO nova.compute.claims [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Claim 
successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.158536] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.158672] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Cleaning up deleted instances {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11916}} [ 844.179569] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660527, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.195448] env[65758]: INFO nova.scheduler.client.report [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Deleted allocations for instance 5e54e7f4-3df1-4283-bee1-a7e475051a24 [ 844.379353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.618985] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.619401] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08ee0b11-247c-445e-a048-3abd7699489d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.629208] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 844.629208] env[65758]: value = "task-4660528" [ 844.629208] env[65758]: _type = "Task" [ 844.629208] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.638792] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660528, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.669112] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] There are 33 instances to clean {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11925}} [ 844.669439] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: fb379346-f17a-4433-bb55-2b72025e9a61] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 844.684040] env[65758]: DEBUG oslo_vmware.api [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660527, 'name': PowerOnVM_Task, 'duration_secs': 0.768818} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.685149] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.685600] env[65758]: INFO nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Took 8.03 seconds to spawn the instance on the hypervisor. [ 844.685952] env[65758]: DEBUG nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 844.688161] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86a6a63-a6f3-4839-b075-e41b879c1ef8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.705687] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bcce908e-bfd5-45df-941a-a0dce0fac04a tempest-ServerShowV254Test-1546303347 tempest-ServerShowV254Test-1546303347-project-member] Lock "5e54e7f4-3df1-4283-bee1-a7e475051a24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.080s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.141504] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660528, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.174794] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a0a9d947-f2ad-4a35-b336-1486c9a76b06] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 845.217469] env[65758]: INFO nova.compute.manager [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Took 51.49 seconds to build instance. [ 845.644583] env[65758]: DEBUG oslo_vmware.api [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660528, 'name': PowerOnVM_Task, 'duration_secs': 0.709375} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.644909] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.645106] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daa38504-3b9e-43c9-afd9-dc6495feec2c tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance '105c53ce-e657-4a29-bc7f-96b4f885707a' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 845.684258] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 54db018a-d54c-4fe5-9a6e-600e801e00b0] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 845.719725] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4ddf59b1-58c4-4c7b-875b-10e73373f4f7 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.609s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.922632] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3894a6b1-c549-43f0-a723-7a723aa7afc2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.932205] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be409144-b837-4bb1-ad32-16ed890c446d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.965365] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f77d385-7811-4d6c-ac47-357beee56f7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.974180] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4a38764e-757e-43ca-ac97-8a24b43d4253 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.990521] env[65758]: DEBUG nova.compute.provider_tree [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.186340] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 9118ff13-e2cf-404c-ae4d-2b9dbc52738d] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 846.223610] env[65758]: DEBUG nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 846.469225] env[65758]: DEBUG nova.compute.manager [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Received event network-changed-216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 846.469478] env[65758]: DEBUG nova.compute.manager [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Refreshing instance network info cache due to event network-changed-216bffab-4451-407d-b8dd-9e8687a90b81. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 846.469627] env[65758]: DEBUG oslo_concurrency.lockutils [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] Acquiring lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.469758] env[65758]: DEBUG oslo_concurrency.lockutils [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] Acquired lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.469913] env[65758]: DEBUG nova.network.neutron [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Refreshing network info cache for port 216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 846.494558] env[65758]: DEBUG nova.scheduler.client.report [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 846.689903] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 4fda2aa0-451c-4c0f-a03a-19ea8b083ba1] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 846.752680] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.973405] env[65758]: WARNING neutronclient.v2_0.client [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 846.974231] env[65758]: WARNING openstack [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 846.974630] env[65758]: WARNING openstack [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 847.001106] env[65758]: DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.847s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.001106] env[65758]: DEBUG nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 847.002889] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.286s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.004312] env[65758]: INFO nova.compute.claims [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.192193] env[65758]: WARNING neutronclient.v2_0.client [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 847.192979] env[65758]: WARNING openstack [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 847.193425] env[65758]: WARNING openstack [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 847.202869] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: de8f3600-b25f-4396-af37-ea703587979c] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 847.287820] env[65758]: DEBUG nova.network.neutron [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updated VIF entry in instance network info cache for port 216bffab-4451-407d-b8dd-9e8687a90b81. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 847.288293] env[65758]: DEBUG nova.network.neutron [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [{"id": "216bffab-4451-407d-b8dd-9e8687a90b81", "address": "fa:16:3e:74:3c:81", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216bffab-44", "ovs_interfaceid": "216bffab-4451-407d-b8dd-9e8687a90b81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 847.508823] env[65758]: DEBUG nova.compute.utils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 847.514208] env[65758]: DEBUG nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc 
tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 847.514208] env[65758]: DEBUG nova.network.neutron [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 847.514208] env[65758]: WARNING neutronclient.v2_0.client [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 847.514445] env[65758]: WARNING neutronclient.v2_0.client [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 847.515202] env[65758]: WARNING openstack [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 847.515561] env[65758]: WARNING openstack [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 847.570948] env[65758]: DEBUG nova.policy [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1278bf40e97444969b59aa6eed0c98c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee7ee47fdf0c4bf9802f9f6ef642150a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 847.710510] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: b7692c74-c919-45b4-991b-c06a530ff9ef] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 847.791969] env[65758]: DEBUG oslo_concurrency.lockutils [req-a93a6e8d-c428-4f29-b504-409989d103f5 req-4f416a4a-ca51-4b72-8ec0-3ce366b8085c service nova] Releasing lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" 
{{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.963532] env[65758]: DEBUG nova.network.neutron [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Successfully created port: bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 848.016869] env[65758]: DEBUG nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 848.135755] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0601463-98f0-4c61-ac2c-ff343407d287 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.143937] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e8a7fc-ea7b-465f-a62e-4f4fd1ef5b3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.178936] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced38178-e73b-4166-aef2-74ccc509faa4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.187503] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7dcb98-8761-479e-8a7a-1fda072ab977 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.204150] env[65758]: DEBUG nova.compute.provider_tree [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.218184] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 8a7f1d79-97ac-4503-a4ed-c99e4f6718c9] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 848.226356] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 848.293080] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
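Annotation: the repeated "Disabling service 'block-storage' / 'key-manager': ... no such option valid_interfaces in group [cinder]/[barbican]" warnings above are openstacksdk failing an oslo.config lookup: the option is simply not registered in those groups of the config object it is handed. Below is a minimal, hedged sketch of the same failure and of the registration that would make the lookup succeed; the option defaults shown are illustrative, not Nova's.

```python
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf([])  # no config files are needed for this sketch

# The [cinder] group exists because other options are registered in it ...
conf.register_opts([cfg.StrOpt('os_region_name')], group='cinder')

try:
    # ... but 'valid_interfaces' was never registered there, so the lookup
    # fails exactly like the warnings in the log.
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(exc)  # no such option valid_interfaces in group [cinder]

# Registering the adapter option (defaults here are illustrative) makes the
# same attribute lookup succeed.
conf.register_opts(
    [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
    group='cinder')
print(conf.cinder.valid_interfaces)  # ['internal', 'public']
```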
[ 848.293591] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 848.293914] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 848.385780] env[65758]: DEBUG nova.network.neutron [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Port ea073371-1ad8-47ae-9cca-67a419a8e219 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 848.386080] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.386230] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.386397] env[65758]: DEBUG nova.network.neutron [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 848.522714] env[65758]: INFO nova.virt.block_device [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Booting with volume 21f94ac1-a7a7-4e71-865b-3193eae1848e at /dev/sda [ 848.569264] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e3515a4-450c-4bcf-b8a4-1ec09e601df8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.579727] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec35adc-02a4-4ea5-8cdf-27a53fad4b10 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.616322] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d321b7d8-3bd6-44a7-a904-137a650f2ebe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.627060] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdb39bb-9338-49a9-9a47-fdcd85e129db {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.662144] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1301027-e7e7-46e4-bfb9-2f22cadbcc2b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.669604] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b840db6-15c5-4a55-b316-3b5c40b310df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.682956] env[65758]: DEBUG nova.virt.block_device [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Updating existing volume attachment record: 6ecaf2e2-913a-47b6-9c8c-961b5c26e6b1 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 848.707148] env[65758]: DEBUG nova.scheduler.client.report [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.722371] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 2bd02c6d-a139-4259-8b28-eed5efc5d094] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 848.889794] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
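Annotation: the "Inventory has not changed for provider ... based on inventory data" entries are the resource tracker's report to Placement. The usable capacity that payload implies follows from (total - reserved) * allocation_ratio; a small sketch using the exact numbers logged above:

```python
# Illustrative only: capacity implied by the inventory payload in the log,
# using Placement's usual formula  usable = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: usable={usable:.0f}, max per single allocation={inv['max_unit']}")
# VCPU: usable=192, MEMORY_MB: usable=196078, DISK_GB: usable=200
```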
[ 848.890907] env[65758]: WARNING openstack [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 848.891314] env[65758]: WARNING openstack [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 849.149021] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 849.149368] env[65758]: WARNING openstack [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 849.149779] env[65758]: WARNING openstack [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 849.212250] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.209s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.212870] env[65758]: DEBUG nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 849.220029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.616s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.220029] env[65758]: INFO nova.compute.claims [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.225148] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: b6b673e9-0ae1-4c7c-be53-e83641063cf8] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 849.490144] env[65758]: DEBUG nova.network.neutron [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Successfully updated port: bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 849.570971] env[65758]: DEBUG nova.network.neutron [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance_info_cache with network_info: [{"id": "ea073371-1ad8-47ae-9cca-67a419a8e219", "address": "fa:16:3e:e4:10:d3", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea073371-1a", "ovs_interfaceid": "ea073371-1ad8-47ae-9cca-67a419a8e219", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 849.712349] env[65758]: DEBUG nova.compute.manager [req-fe492232-2c90-4bd3-acd0-1a3039085d90 req-9d18c69b-861f-4387-96db-49a30674b7c9 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Received event network-vif-plugged-bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 849.713549] env[65758]: DEBUG oslo_concurrency.lockutils [req-fe492232-2c90-4bd3-acd0-1a3039085d90 req-9d18c69b-861f-4387-96db-49a30674b7c9 service nova] Acquiring lock 
"d42d0818-1486-4696-9871-2cf989aeb885-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.713549] env[65758]: DEBUG oslo_concurrency.lockutils [req-fe492232-2c90-4bd3-acd0-1a3039085d90 req-9d18c69b-861f-4387-96db-49a30674b7c9 service nova] Lock "d42d0818-1486-4696-9871-2cf989aeb885-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.713549] env[65758]: DEBUG oslo_concurrency.lockutils [req-fe492232-2c90-4bd3-acd0-1a3039085d90 req-9d18c69b-861f-4387-96db-49a30674b7c9 service nova] Lock "d42d0818-1486-4696-9871-2cf989aeb885-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.713549] env[65758]: DEBUG nova.compute.manager [req-fe492232-2c90-4bd3-acd0-1a3039085d90 req-9d18c69b-861f-4387-96db-49a30674b7c9 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] No waiting events found dispatching network-vif-plugged-bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 849.714061] env[65758]: WARNING nova.compute.manager [req-fe492232-2c90-4bd3-acd0-1a3039085d90 req-9d18c69b-861f-4387-96db-49a30674b7c9 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Received unexpected event network-vif-plugged-bb884939-9aaf-474f-9246-eb279d11aa4e for instance with vm_state building and task_state block_device_mapping. [ 849.724808] env[65758]: DEBUG nova.compute.utils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 849.726875] env[65758]: DEBUG nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 849.727082] env[65758]: DEBUG nova.network.neutron [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 849.727407] env[65758]: WARNING neutronclient.v2_0.client [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 849.727722] env[65758]: WARNING neutronclient.v2_0.client [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 849.728375] env[65758]: WARNING openstack [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 849.728651] env[65758]: WARNING openstack [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 849.737551] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 148eddf4-4c01-47bc-be81-451ca57e7347] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 849.794172] env[65758]: DEBUG nova.policy [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2bfc2c6e351e464fb93e2e259efa3b0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fa74a4cc7eb4456a1600fc1858d0135', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 849.992835] env[65758]: DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Acquiring lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.995720] env[65758]: DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Acquired lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.995720] env[65758]: DEBUG nova.network.neutron [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 850.078708] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.123717] env[65758]: DEBUG nova.network.neutron [None 
req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Successfully created port: 8a825fae-1c70-4269-a958-4d8e821b6eeb {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 850.238187] env[65758]: DEBUG nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 850.242560] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 03073968-e679-4ce5-9f84-c4765217b308] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 850.498354] env[65758]: WARNING openstack [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 850.498810] env[65758]: WARNING openstack [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 850.534302] env[65758]: DEBUG nova.network.neutron [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 850.580772] env[65758]: DEBUG nova.compute.manager [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=65758) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:925}} [ 850.581076] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.610514] env[65758]: WARNING neutronclient.v2_0.client [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
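Annotation: the "Policy check for network:attach_external_network failed" entries are expected for a plain member token: the rule defaults to admin-only, so Nova simply does not request an external network for the port. A rough, hedged sketch of the same check with oslo.policy; the default rule string is an assumption for illustration, not copied from Nova's policy definitions.

```python
from oslo_config import cfg
from oslo_policy import policy

conf = cfg.ConfigOpts()
conf([])  # no policy file needed; registered defaults are used
enforcer = policy.Enforcer(conf)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'role:admin'))  # assumed admin-only default

# Credentials trimmed from the failing request above: only reader/member roles.
creds = {'roles': ['reader', 'member'], 'is_admin': False,
         'project_id': '9fa74a4cc7eb4456a1600fc1858d0135'}
print(enforcer.enforce('network:attach_external_network', {}, creds))  # False
```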
[ 850.611227] env[65758]: WARNING openstack [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 850.611649] env[65758]: WARNING openstack [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 850.739137] env[65758]: DEBUG nova.network.neutron [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Updating instance_info_cache with network_info: [{"id": "bb884939-9aaf-474f-9246-eb279d11aa4e", "address": "fa:16:3e:56:a6:cd", "network": {"id": "894a2450-645a-478b-a119-7a6383b9b715", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1509187597-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee7ee47fdf0c4bf9802f9f6ef642150a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb884939-9a", "ovs_interfaceid": "bb884939-9aaf-474f-9246-eb279d11aa4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 850.749179] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 492d1063-8eaf-4207-8d65-341fbc0b6c39] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 850.795487] env[65758]: DEBUG nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 850.795799] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 850.797762] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 850.797957] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 850.798177] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 850.798321] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 850.798466] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 850.798673] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 850.798831] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 850.799020] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Got 1 possible 
topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 850.799310] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 850.799487] env[65758]: DEBUG nova.virt.hardware [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 850.800369] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26582ac0-da0f-4abb-ac78-95b770a1d9c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.804317] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46bf7f4-f716-4948-aed7-3305ac642ac5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.815292] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8061bdf8-e51a-47c3-8a5e-07dc47b07a44 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.819828] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c78f3b-5fef-4c8b-aad3-4ac2a874b279 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.864203] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2153352b-29a6-4b0b-b249-0c52ca647233 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.872389] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9648caef-8638-4d3b-830b-435aca29e33d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.886663] env[65758]: DEBUG nova.compute.provider_tree [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.242819] env[65758]: DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Releasing lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.243294] env[65758]: DEBUG nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Instance network_info: |[{"id": "bb884939-9aaf-474f-9246-eb279d11aa4e", "address": 
"fa:16:3e:56:a6:cd", "network": {"id": "894a2450-645a-478b-a119-7a6383b9b715", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1509187597-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee7ee47fdf0c4bf9802f9f6ef642150a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb884939-9a", "ovs_interfaceid": "bb884939-9aaf-474f-9246-eb279d11aa4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 851.243801] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:a6:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb884939-9aaf-474f-9246-eb279d11aa4e', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 851.251275] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Creating folder: Project (ee7ee47fdf0c4bf9802f9f6ef642150a). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.252399] env[65758]: DEBUG nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 851.254553] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 8eb65797-072b-4a7e-853d-26c0adc51bb2] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 851.256757] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1838a479-22e9-4fbb-9517-601521c3543d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.274805] env[65758]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 851.274974] env[65758]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=65758) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 851.275388] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Folder already exists: Project (ee7ee47fdf0c4bf9802f9f6ef642150a). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 851.275578] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Creating folder: Instances. Parent ref: group-v909885. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.277711] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1d2cb3f-24ad-4def-aaa6-cb0be633e451 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.286197] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 851.286445] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 851.286600] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 851.286784] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 851.286922] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 851.287073] env[65758]: DEBUG nova.virt.hardware [None 
req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 851.287279] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.287419] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 851.287575] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 851.287729] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 851.287890] env[65758]: DEBUG nova.virt.hardware [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 851.289159] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32eed5c-3ba1-484e-b982-6c9d4573dbb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.293497] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Created folder: Instances in parent group-v909885. [ 851.293788] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 851.294373] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 851.294603] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f7ff6ef-5e1a-40b9-a42c-8080d07c769d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.313387] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfef63c-94cb-4954-bbe8-2aaffc256e87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.319468] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 851.319468] env[65758]: value = "task-4660531" [ 851.319468] env[65758]: _type = "Task" [ 851.319468] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.337599] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660531, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.390712] env[65758]: DEBUG nova.scheduler.client.report [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 851.650784] env[65758]: DEBUG nova.network.neutron [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Successfully updated port: 8a825fae-1c70-4269-a958-4d8e821b6eeb {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 851.759870] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 24016efd-cdb3-4c1e-9c08-8643400e729e] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 851.773636] env[65758]: DEBUG nova.compute.manager [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Received event network-changed-bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 851.774181] env[65758]: DEBUG nova.compute.manager [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Refreshing instance network info cache due to event network-changed-bb884939-9aaf-474f-9246-eb279d11aa4e. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 851.774507] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Acquiring lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.774822] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Acquired lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.774862] env[65758]: DEBUG nova.network.neutron [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Refreshing network info cache for port bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 851.832620] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660531, 'name': CreateVM_Task, 'duration_secs': 0.338598} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.832835] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 851.837400] env[65758]: WARNING neutronclient.v2_0.client [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 851.837960] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909892', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'name': 'volume-21f94ac1-a7a7-4e71-865b-3193eae1848e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd42d0818-1486-4696-9871-2cf989aeb885', 'attached_at': '', 'detached_at': '', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'serial': '21f94ac1-a7a7-4e71-865b-3193eae1848e'}, 'attachment_id': '6ecaf2e2-913a-47b6-9c8c-961b5c26e6b1', 'disk_bus': None, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=65758) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 851.838301] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Root volume attach. 
Driver type: vmdk {{(pid=65758) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 851.839417] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39990729-350f-4a03-a37f-de53a894e991 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.851030] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac050e1-678f-402b-9097-d9a723c6d92b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.859556] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21db66e9-3b23-4eb7-863d-2f714eca9ae3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.869792] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-e2ad8cb4-6275-48e7-80a7-2d80d5ba461b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.879971] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 851.879971] env[65758]: value = "task-4660532" [ 851.879971] env[65758]: _type = "Task" [ 851.879971] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.894820] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.896463] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.896834] env[65758]: DEBUG nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 851.900041] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.691s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.900340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.902715] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 46.349s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.949680] env[65758]: INFO nova.scheduler.client.report [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Deleted allocations for instance 81f961c3-ec8f-4281-be18-5d605fa73ecc [ 852.153991] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock "refresh_cache-85082b72-89dd-47b7-b8ad-f2ad5ad0638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.154103] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquired lock "refresh_cache-85082b72-89dd-47b7-b8ad-f2ad5ad0638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.154467] env[65758]: DEBUG nova.network.neutron [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 852.262722] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 0addcbb1-3561-4c93-b714-37e6b613b962] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 852.277788] env[65758]: WARNING neutronclient.v2_0.client [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
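Annotation: earlier, Folder.CreateFolder came back with a DuplicateName fault ("Fault list: [DuplicateName]") and the driver logged "Folder already exists" and carried on, treating folder creation as idempotent. A hedged sketch of that pattern follows; find_child_folder is a hypothetical helper, and the exception mapping assumes oslo.vmware surfaces the fault as DuplicateName, as the Nova driver expects.

```python
from oslo_vmware import exceptions as vexc

def create_folder_idempotent(session, parent_ref, name):
    """Create a vSphere folder, reusing it if an earlier run already did."""
    try:
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # vCenter answered CreateFolder with a DuplicateName fault, so an
        # identically named folder already exists under parent_ref.
        return find_child_folder(session, parent_ref, name)  # hypothetical helper
```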
[ 852.278557] env[65758]: WARNING openstack [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 852.278911] env[65758]: WARNING openstack [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 852.395175] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task} progress is 40%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.411221] env[65758]: INFO nova.compute.claims [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.417147] env[65758]: DEBUG nova.compute.utils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 852.420824] env[65758]: DEBUG nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 852.420978] env[65758]: DEBUG nova.network.neutron [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 852.421466] env[65758]: WARNING neutronclient.v2_0.client [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 852.423765] env[65758]: WARNING neutronclient.v2_0.client [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 852.423765] env[65758]: WARNING openstack [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 852.423765] env[65758]: WARNING openstack [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 852.460680] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bf7413bf-498f-4e39-9d45-56b68a0f9d77 tempest-ServersTestMultiNic-1126327395 tempest-ServersTestMultiNic-1126327395-project-member] Lock "81f961c3-ec8f-4281-be18-5d605fa73ecc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.648s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.481814] env[65758]: DEBUG nova.policy [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '742a9f6633b54c6f8cd432ac94b59e25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e3a324879d646699f950687546ea861', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 852.506637] env[65758]: WARNING neutronclient.v2_0.client [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
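
The lockutils lines above record two durations for every critical section on "compute_resources": how long the caller waited to acquire the lock (e.g. waited 46.691s) and how long it then held it (e.g. held 2.680s). A rough stand-alone imitation of that accounting, assuming a plain threading.Lock rather than oslo.concurrency's fair or external locks:

    import contextlib
    import threading
    import time

    _locks = {}

    @contextlib.contextmanager
    def timed_lock(name):
        """Log waited/held times around a named lock, in the style of the lines above."""
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" :: held %.3fs' % (name, time.monotonic() - t1))

    # Example: with timed_lock("compute_resources"): ...
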
[ 852.507361] env[65758]: WARNING openstack [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 852.507844] env[65758]: WARNING openstack [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 852.657362] env[65758]: WARNING openstack [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 852.657940] env[65758]: WARNING openstack [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 852.715153] env[65758]: DEBUG nova.network.neutron [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Updated VIF entry in instance network info cache for port bb884939-9aaf-474f-9246-eb279d11aa4e. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 852.715509] env[65758]: DEBUG nova.network.neutron [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Updating instance_info_cache with network_info: [{"id": "bb884939-9aaf-474f-9246-eb279d11aa4e", "address": "fa:16:3e:56:a6:cd", "network": {"id": "894a2450-645a-478b-a119-7a6383b9b715", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1509187597-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee7ee47fdf0c4bf9802f9f6ef642150a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb884939-9a", "ovs_interfaceid": "bb884939-9aaf-474f-9246-eb279d11aa4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 852.759041] env[65758]: DEBUG nova.network.neutron [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 852.765895] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a2010738-759b-480a-8360-2639788056b1] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 852.879568] env[65758]: DEBUG nova.network.neutron [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Successfully created port: a555e91f-164f-4b04-83dd-828041132dcc {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 852.894511] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task} progress is 54%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.913874] env[65758]: WARNING neutronclient.v2_0.client [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
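
The instance_info_cache update above stores each port as a VIF dict (id, MAC address, nested network/subnets/ips, OVS details). A small sketch of pulling the useful fields out of that structure; the sample dict is trimmed from the cache entry logged above, and the helper name is illustrative, not Nova's:

    def summarize_vif(vif):
        """Return (port_id, mac, fixed_ips, devname) from a cached VIF dict."""
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"
        ]
        return vif["id"], vif["address"], fixed_ips, vif.get("devname")

    # Trimmed from the instance_info_cache entry above.
    vif = {
        "id": "bb884939-9aaf-474f-9246-eb279d11aa4e",
        "address": "fa:16:3e:56:a6:cd",
        "devname": "tapbb884939-9a",
        "network": {
            "id": "894a2450-645a-478b-a119-7a6383b9b715",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.7", "type": "fixed"}],
            }],
        },
    }

    print(summarize_vif(vif))
    # ('bb884939-9aaf-474f-9246-eb279d11aa4e', 'fa:16:3e:56:a6:cd', ['192.168.128.7'], 'tapbb884939-9a')
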
[ 852.914631] env[65758]: WARNING openstack [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 852.915134] env[65758]: WARNING openstack [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 852.924629] env[65758]: INFO nova.compute.resource_tracker [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating resource usage from migration cbce059b-48af-4be4-a4d3-19366314e65f [ 852.932571] env[65758]: DEBUG nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 853.053938] env[65758]: DEBUG nova.network.neutron [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Updating instance_info_cache with network_info: [{"id": "8a825fae-1c70-4269-a958-4d8e821b6eeb", "address": "fa:16:3e:b0:39:77", "network": {"id": "8cd1986c-2c79-4fc9-a099-4020a2007da1", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2079702945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fa74a4cc7eb4456a1600fc1858d0135", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a825fae-1c", "ovs_interfaceid": "8a825fae-1c70-4269-a958-4d8e821b6eeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 853.220687] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Releasing lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
853.221015] env[65758]: DEBUG nova.compute.manager [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Received event network-vif-plugged-8a825fae-1c70-4269-a958-4d8e821b6eeb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 853.221228] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Acquiring lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.221432] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.221587] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.221750] env[65758]: DEBUG nova.compute.manager [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] No waiting events found dispatching network-vif-plugged-8a825fae-1c70-4269-a958-4d8e821b6eeb {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 853.222034] env[65758]: WARNING nova.compute.manager [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Received unexpected event network-vif-plugged-8a825fae-1c70-4269-a958-4d8e821b6eeb for instance with vm_state building and task_state spawning. [ 853.222206] env[65758]: DEBUG nova.compute.manager [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Received event network-changed-8a825fae-1c70-4269-a958-4d8e821b6eeb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 853.222355] env[65758]: DEBUG nova.compute.manager [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Refreshing instance network info cache due to event network-changed-8a825fae-1c70-4269-a958-4d8e821b6eeb. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 853.222583] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Acquiring lock "refresh_cache-85082b72-89dd-47b7-b8ad-f2ad5ad0638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.271231] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 483765b5-c63c-4aac-9082-519bbc4e6eb5] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 853.398494] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task} progress is 67%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.556314] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49a3153-0c6c-4a96-b591-16903628aab5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.559352] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Releasing lock "refresh_cache-85082b72-89dd-47b7-b8ad-f2ad5ad0638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.559730] env[65758]: DEBUG nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Instance network_info: |[{"id": "8a825fae-1c70-4269-a958-4d8e821b6eeb", "address": "fa:16:3e:b0:39:77", "network": {"id": "8cd1986c-2c79-4fc9-a099-4020a2007da1", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2079702945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fa74a4cc7eb4456a1600fc1858d0135", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a825fae-1c", "ovs_interfaceid": "8a825fae-1c70-4269-a958-4d8e821b6eeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 853.560130] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Acquired lock "refresh_cache-85082b72-89dd-47b7-b8ad-f2ad5ad0638d" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.560409] env[65758]: DEBUG nova.network.neutron [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Refreshing network info cache for port 8a825fae-1c70-4269-a958-4d8e821b6eeb {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 853.561727] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:39:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a825fae-1c70-4269-a958-4d8e821b6eeb', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.569470] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Creating folder: Project (9fa74a4cc7eb4456a1600fc1858d0135). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.571147] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa7809b3-3923-44dc-ba3b-24c4b9adcad9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.578370] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c299f4-3826-443c-a368-6c7b57a125e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.587477] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Created folder: Project (9fa74a4cc7eb4456a1600fc1858d0135) in parent group-v909763. [ 853.587477] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Creating folder: Instances. Parent ref: group-v909929. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.613624] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24486041-95ff-4fe9-96c2-36c6bc255d36 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.616795] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadd1dde-0a9a-49f2-b140-40f53a032f11 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.628047] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68aeaef-e022-4ef0-a59c-32abc2463071 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.650634] env[65758]: DEBUG nova.compute.provider_tree [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.653616] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Created folder: Instances in parent group-v909929. [ 853.654049] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 853.654382] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.654618] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21c33526-cb06-4173-8780-1460ea791a81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.679810] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.679810] env[65758]: value = "task-4660535" [ 853.679810] env[65758]: _type = "Task" [ 853.679810] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.690827] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660535, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.775135] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 83b637d8-b9fa-4159-b879-c1d737871539] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 853.898168] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task} progress is 81%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.946881] env[65758]: DEBUG nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 853.979717] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 853.979979] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 853.980148] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 853.980331] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 853.980475] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 853.980651] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 853.980862] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.981016] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e 
tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 853.981182] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 853.981347] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 853.981516] env[65758]: DEBUG nova.virt.hardware [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 853.982518] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb8326a-8087-414a-b4a8-8f94d1b65649 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.995223] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e4559b-93a0-411d-966a-ca09f8f05fae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.071650] env[65758]: WARNING neutronclient.v2_0.client [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
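
The nova.virt.hardware entries above walk through CPU topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the only factorization is sockets=1, cores=1, threads=1. The enumeration step can be sketched as listing every (sockets, cores, threads) triple whose product equals the vCPU count, subject to optional caps; this is an illustrative reimplementation, not the code in nova/virt/hardware.py:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product is exactly vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    print(list(possible_topologies(1)))   # [(1, 1, 1)] -- the single topology logged above
    print(list(possible_topologies(4)))   # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), (4, 1, 1), ...
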
[ 854.073402] env[65758]: WARNING openstack [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 854.073994] env[65758]: WARNING openstack [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 854.155513] env[65758]: DEBUG nova.scheduler.client.report [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.197030] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660535, 'name': CreateVM_Task, 'duration_secs': 0.453266} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.197244] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.197792] env[65758]: WARNING neutronclient.v2_0.client [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
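
The inventory payload reported to placement above (provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51) determines schedulable capacity per resource class as (total - reserved) * allocation_ratio, subject to min_unit/max_unit/step_size. Using the logged figures, that works out to 192 VCPU, 196078 MB of RAM and 200 GB of disk; a quick check of that arithmetic under those assumptions:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def effective_capacity(inv):
        """Capacity placement can allocate: (total - reserved) * allocation_ratio."""
        return {
            rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()
        }

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}
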
[ 854.198181] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.198331] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.198658] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 854.198946] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50904013-08b3-412e-90d5-bafca58da15d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.208361] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 854.208361] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a4fa12-0f56-4243-574d-5edf09a765f1" [ 854.208361] env[65758]: _type = "Task" [ 854.208361] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.220652] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a4fa12-0f56-4243-574d-5edf09a765f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.280725] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 28ccc013-962d-4607-83a2-5fcd480c27b2] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 854.397728] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task} progress is 95%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.425041] env[65758]: WARNING neutronclient.v2_0.client [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 854.425719] env[65758]: WARNING openstack [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 854.426100] env[65758]: WARNING openstack [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 854.505980] env[65758]: DEBUG nova.network.neutron [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Successfully updated port: a555e91f-164f-4b04-83dd-828041132dcc {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 854.571875] env[65758]: DEBUG nova.network.neutron [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Updated VIF entry in instance network info cache for port 8a825fae-1c70-4269-a958-4d8e821b6eeb. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 854.572341] env[65758]: DEBUG nova.network.neutron [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Updating instance_info_cache with network_info: [{"id": "8a825fae-1c70-4269-a958-4d8e821b6eeb", "address": "fa:16:3e:b0:39:77", "network": {"id": "8cd1986c-2c79-4fc9-a099-4020a2007da1", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2079702945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fa74a4cc7eb4456a1600fc1858d0135", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a825fae-1c", "ovs_interfaceid": "8a825fae-1c70-4269-a958-4d8e821b6eeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 854.661974] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.758s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
854.661974] env[65758]: INFO nova.compute.manager [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Migrating [ 854.669913] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.454s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.670140] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.672253] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.664s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.672420] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.674275] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.541s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.675790] env[65758]: INFO nova.compute.claims [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.715201] env[65758]: INFO nova.scheduler.client.report [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Deleted allocations for instance 1e249ca9-a7a8-440f-832b-a8f5d84ada8b [ 854.716780] env[65758]: INFO nova.scheduler.client.report [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Deleted allocations for instance 56ff4122-a999-4caf-b805-0754a66d6bc7 [ 854.728018] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 
tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a4fa12-0f56-4243-574d-5edf09a765f1, 'name': SearchDatastore_Task, 'duration_secs': 0.015533} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.728496] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.728705] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 854.728890] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.729074] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.729238] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.729505] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75f550a8-3aa4-4275-b389-bb74976bae6d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.741584] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.741811] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 854.744177] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96d978ff-5bed-4733-887e-e03ea6aadcee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.754863] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 854.754863] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f05769-ffca-4806-6cb9-1f9b47ebb56b" [ 854.754863] env[65758]: _type = "Task" [ 854.754863] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.767052] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f05769-ffca-4806-6cb9-1f9b47ebb56b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.786460] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: f7a14628-cc55-41fa-ae89-3958855df8a7] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 854.791196] env[65758]: DEBUG nova.compute.manager [req-7b6dd7f9-0cd5-47a5-9ddd-4dcfffae20f4 req-9eed289f-289b-49df-951d-c6d682196538 service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Received event network-vif-plugged-a555e91f-164f-4b04-83dd-828041132dcc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 854.791583] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b6dd7f9-0cd5-47a5-9ddd-4dcfffae20f4 req-9eed289f-289b-49df-951d-c6d682196538 service nova] Acquiring lock "96103549-80a5-462d-9f73-f5f6363ab9fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.791781] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b6dd7f9-0cd5-47a5-9ddd-4dcfffae20f4 req-9eed289f-289b-49df-951d-c6d682196538 service nova] Lock "96103549-80a5-462d-9f73-f5f6363ab9fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.791969] env[65758]: DEBUG oslo_concurrency.lockutils [req-7b6dd7f9-0cd5-47a5-9ddd-4dcfffae20f4 req-9eed289f-289b-49df-951d-c6d682196538 service nova] Lock "96103549-80a5-462d-9f73-f5f6363ab9fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.792158] env[65758]: DEBUG nova.compute.manager [req-7b6dd7f9-0cd5-47a5-9ddd-4dcfffae20f4 req-9eed289f-289b-49df-951d-c6d682196538 service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] No waiting events found dispatching network-vif-plugged-a555e91f-164f-4b04-83dd-828041132dcc {{(pid=65758) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:345}} [ 854.792319] env[65758]: WARNING nova.compute.manager [req-7b6dd7f9-0cd5-47a5-9ddd-4dcfffae20f4 req-9eed289f-289b-49df-951d-c6d682196538 service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Received unexpected event network-vif-plugged-a555e91f-164f-4b04-83dd-828041132dcc for instance with vm_state building and task_state spawning. [ 854.899653] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task} progress is 97%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.009358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "refresh_cache-96103549-80a5-462d-9f73-f5f6363ab9fc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.009581] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "refresh_cache-96103549-80a5-462d-9f73-f5f6363ab9fc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.009787] env[65758]: DEBUG nova.network.neutron [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 855.074908] env[65758]: DEBUG oslo_concurrency.lockutils [req-34d10617-3431-4d5b-a771-ee3794eb5c24 req-90bcbb74-f5aa-4c93-a81b-c9da69aa03fb service nova] Releasing lock "refresh_cache-85082b72-89dd-47b7-b8ad-f2ad5ad0638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.188325] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.188527] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.188695] env[65758]: DEBUG nova.network.neutron [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 855.232189] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4149f00c-144c-41e8-9f9e-13ae35fbe767 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock 
"1e249ca9-a7a8-440f-832b-a8f5d84ada8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.996s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.233380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0011d8ac-f32e-4e53-8ff8-870052a12ea9 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984 tempest-FloatingIPsAssociationNegativeTestJSON-1416911984-project-member] Lock "56ff4122-a999-4caf-b805-0754a66d6bc7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.565s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.268224] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f05769-ffca-4806-6cb9-1f9b47ebb56b, 'name': SearchDatastore_Task, 'duration_secs': 0.013364} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.269218] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ab8a110-93a7-46ea-bdb5-4977f5e3b8a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.279772] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 855.279772] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e1ffcb-6ac6-0816-6a7b-26e2d1cef0d7" [ 855.279772] env[65758]: _type = "Task" [ 855.279772] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.289218] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e1ffcb-6ac6-0816-6a7b-26e2d1cef0d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.295255] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 24379189-b10a-4ef6-a3f6-b7bb43029dab] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 855.402233] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.513417] env[65758]: WARNING openstack [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 855.514662] env[65758]: WARNING openstack [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 855.551832] env[65758]: DEBUG nova.network.neutron [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 855.689048] env[65758]: WARNING neutronclient.v2_0.client [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 855.689737] env[65758]: WARNING openstack [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 855.690142] env[65758]: WARNING openstack [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 855.706089] env[65758]: WARNING neutronclient.v2_0.client [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 855.706089] env[65758]: WARNING openstack [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 855.706089] env[65758]: WARNING openstack [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 855.795758] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e1ffcb-6ac6-0816-6a7b-26e2d1cef0d7, 'name': SearchDatastore_Task, 'duration_secs': 0.011431} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.795758] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.795959] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 85082b72-89dd-47b7-b8ad-f2ad5ad0638d/85082b72-89dd-47b7-b8ad-f2ad5ad0638d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 855.796221] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7ac0879-a13b-4f95-8d66-ef9d4640001e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.799860] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 0ac196fa-d88c-45a8-999e-8b5216912041] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 855.806695] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 855.806695] env[65758]: value = "task-4660536" [ 855.806695] env[65758]: _type = "Task" [ 855.806695] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.828711] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660536, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.899357] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660532, 'name': RelocateVM_Task, 'duration_secs': 3.608469} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.903566] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Volume attach. Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 855.903830] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909892', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'name': 'volume-21f94ac1-a7a7-4e71-865b-3193eae1848e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd42d0818-1486-4696-9871-2cf989aeb885', 'attached_at': '', 'detached_at': '', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'serial': '21f94ac1-a7a7-4e71-865b-3193eae1848e'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 855.904927] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065539b9-4bbf-470f-a5dd-7ec327135528 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.926699] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a141bc95-c17c-4cbc-b064-891bdf2bef19 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.952376] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] volume-21f94ac1-a7a7-4e71-865b-3193eae1848e/volume-21f94ac1-a7a7-4e71-865b-3193eae1848e.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.955578] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7dbc061f-dbd7-4d97-bfd7-08cfece586cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.977744] env[65758]: DEBUG oslo_vmware.api [None 
req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 855.977744] env[65758]: value = "task-4660537" [ 855.977744] env[65758]: _type = "Task" [ 855.977744] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.990120] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660537, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.261037] env[65758]: DEBUG nova.network.neutron [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Updating instance_info_cache with network_info: [{"id": "a555e91f-164f-4b04-83dd-828041132dcc", "address": "fa:16:3e:d3:2a:e3", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa555e91f-16", "ovs_interfaceid": "a555e91f-164f-4b04-83dd-828041132dcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 856.303654] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: e60efbcd-1c4e-40a1-8bc1-893daa511073] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 856.332711] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660536, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52192} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.333323] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 85082b72-89dd-47b7-b8ad-f2ad5ad0638d/85082b72-89dd-47b7-b8ad-f2ad5ad0638d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 856.333323] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 856.333548] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4560d432-b259-46d1-9f32-bf7f81a8909c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.352030] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 856.352030] env[65758]: value = "task-4660538" [ 856.352030] env[65758]: _type = "Task" [ 856.352030] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.361545] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660538, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.457654] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15526baa-6422-4e3d-9eff-73abe68f18ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.466988] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef8a15e-1a95-468d-b417-0eac141e14b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.509034] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2941acc6-96fa-4ad6-8837-f7b96a0fa17e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.519346] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660537, 'name': ReconfigVM_Task, 'duration_secs': 0.447984} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.520947] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Reconfigured VM instance instance-0000003c to attach disk [datastore1] volume-21f94ac1-a7a7-4e71-865b-3193eae1848e/volume-21f94ac1-a7a7-4e71-865b-3193eae1848e.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.527647] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-733b11ed-9b61-42ea-9f52-b66e5ee3b4fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.537676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1aad5b-15a4-414d-9a63-27fa29e06944 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.553301] env[65758]: DEBUG nova.compute.provider_tree [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.555965] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 856.555965] env[65758]: value = "task-4660539" [ 856.555965] env[65758]: _type = "Task" [ 856.555965] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.566580] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660539, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.763837] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "refresh_cache-96103549-80a5-462d-9f73-f5f6363ab9fc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.764306] env[65758]: DEBUG nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Instance network_info: |[{"id": "a555e91f-164f-4b04-83dd-828041132dcc", "address": "fa:16:3e:d3:2a:e3", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa555e91f-16", "ovs_interfaceid": "a555e91f-164f-4b04-83dd-828041132dcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 856.765026] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:2a:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a555e91f-164f-4b04-83dd-828041132dcc', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.775836] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 856.775836] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.775836] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a80e3c40-9068-48cf-9ac4-00e218082f06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.794991] env[65758]: WARNING neutronclient.v2_0.client [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 856.795537] env[65758]: WARNING openstack [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 856.795948] env[65758]: WARNING openstack [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 856.811436] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.811436] env[65758]: value = "task-4660540" [ 856.811436] env[65758]: _type = "Task" [ 856.811436] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.818958] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: e48a075b-41b3-4612-bd5f-0a158d707a2f] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 856.831489] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660540, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.863265] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660538, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086736} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.863692] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 856.864580] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e6fde7-9002-4c00-8905-8220e3f3102b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.894491] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 85082b72-89dd-47b7-b8ad-f2ad5ad0638d/85082b72-89dd-47b7-b8ad-f2ad5ad0638d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 856.900310] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66764517-f19f-42be-bb86-fb7c1f230422 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.924994] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 856.924994] env[65758]: value = "task-4660541" [ 856.924994] env[65758]: _type = "Task" [ 856.924994] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.936637] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660541, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.036236] env[65758]: DEBUG nova.network.neutron [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance_info_cache with network_info: [{"id": "e31ffc86-5e08-405f-8129-6af1973003bf", "address": "fa:16:3e:88:0c:68", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31ffc86-5e", "ovs_interfaceid": "e31ffc86-5e08-405f-8129-6af1973003bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 857.065934] env[65758]: DEBUG nova.scheduler.client.report [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 857.078745] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660539, 'name': ReconfigVM_Task, 'duration_secs': 0.140319} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.079318] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909892', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'name': 'volume-21f94ac1-a7a7-4e71-865b-3193eae1848e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd42d0818-1486-4696-9871-2cf989aeb885', 'attached_at': '', 'detached_at': '', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'serial': '21f94ac1-a7a7-4e71-865b-3193eae1848e'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 857.080921] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9770bcf3-3c36-493b-b284-670ee4e65526 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.091586] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 857.091586] env[65758]: value = "task-4660542" [ 857.091586] env[65758]: _type = "Task" [ 857.091586] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.103062] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660542, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.324579] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660540, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.330652] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 64c4718b-8ed3-4ba5-99e3-a1e0f69cb10b] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 857.438900] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660541, 'name': ReconfigVM_Task, 'duration_secs': 0.496253} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.439473] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 85082b72-89dd-47b7-b8ad-f2ad5ad0638d/85082b72-89dd-47b7-b8ad-f2ad5ad0638d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.441740] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f59ef431-84c9-41df-a9ba-f26037bff526 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.449721] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 857.449721] env[65758]: value = "task-4660543" [ 857.449721] env[65758]: _type = "Task" [ 857.449721] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.461535] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660543, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.539021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.571883] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.897s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.572442] env[65758]: DEBUG nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 857.575287] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.680s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.576644] env[65758]: INFO nova.compute.claims [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.604705] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660542, 'name': Rename_Task, 'duration_secs': 0.311179} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.605122] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.606098] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe131a15-00e4-4ec0-ae2f-9942186c5df9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.613384] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 857.613384] env[65758]: value = "task-4660544" [ 857.613384] env[65758]: _type = "Task" [ 857.613384] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.619078] env[65758]: DEBUG nova.compute.manager [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Received event network-changed-a555e91f-164f-4b04-83dd-828041132dcc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 857.619078] env[65758]: DEBUG nova.compute.manager [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Refreshing instance network info cache due to event network-changed-a555e91f-164f-4b04-83dd-828041132dcc. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 857.619078] env[65758]: DEBUG oslo_concurrency.lockutils [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] Acquiring lock "refresh_cache-96103549-80a5-462d-9f73-f5f6363ab9fc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.619078] env[65758]: DEBUG oslo_concurrency.lockutils [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] Acquired lock "refresh_cache-96103549-80a5-462d-9f73-f5f6363ab9fc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.619078] env[65758]: DEBUG nova.network.neutron [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Refreshing network info cache for port a555e91f-164f-4b04-83dd-828041132dcc {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 857.631570] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660544, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.825379] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660540, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.837452] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: adc1b956-1b5a-4272-b0ff-95a565e9c45c] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 857.964476] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660543, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.086691] env[65758]: DEBUG nova.compute.utils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 858.090848] env[65758]: DEBUG nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 858.092678] env[65758]: DEBUG nova.network.neutron [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 858.092678] env[65758]: WARNING neutronclient.v2_0.client [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 858.092678] env[65758]: WARNING neutronclient.v2_0.client [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 858.095692] env[65758]: WARNING openstack [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 858.096811] env[65758]: WARNING openstack [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 858.127369] env[65758]: WARNING neutronclient.v2_0.client [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 858.129441] env[65758]: WARNING openstack [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 858.129441] env[65758]: WARNING openstack [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 858.138929] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660544, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.174782] env[65758]: DEBUG nova.policy [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f2ed00d7f814d1f907ba5900c8f3025', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16188c7bd36d4b0eaffdc980b71ac727', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 858.327716] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660540, 'name': CreateVM_Task, 'duration_secs': 1.39557} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.328937] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 858.328937] env[65758]: WARNING neutronclient.v2_0.client [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 858.328937] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.328937] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.329991] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 858.329991] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dc66e9e-a357-4ac4-bcee-291eb060f65a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.335760] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 858.335760] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523fb1a4-c339-af4a-625a-3fc4aef5ce22" [ 858.335760] env[65758]: _type = "Task" [ 858.335760] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.342813] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: f1a1650b-4c45-47fc-9c45-f4625c959597] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 858.356308] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523fb1a4-c339-af4a-625a-3fc4aef5ce22, 'name': SearchDatastore_Task, 'duration_secs': 0.01214} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.357045] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.357045] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.357678] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.357930] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.358242] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.358601] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebf93c6a-4f57-43b6-88b5-bc91e695d4a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.374022] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.374022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 
tempest-ImagesTestJSON-2085877570-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.374022] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570498c2-6ab7-4719-91d3-a4a0510b3038 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.385987] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 858.385987] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b5cbaf-f6fd-eb53-2ac9-d401252fc254" [ 858.385987] env[65758]: _type = "Task" [ 858.385987] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.399025] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b5cbaf-f6fd-eb53-2ac9-d401252fc254, 'name': SearchDatastore_Task, 'duration_secs': 0.010988} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.401958] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee0600bf-87a2-4b66-ba8c-d9b9ccfd4ff9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.408732] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 858.408732] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5247b399-61c1-bd4e-3195-d41b3b355011" [ 858.408732] env[65758]: _type = "Task" [ 858.408732] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.420388] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5247b399-61c1-bd4e-3195-d41b3b355011, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.463789] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660543, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.591547] env[65758]: DEBUG nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 858.628850] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660544, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.660836] env[65758]: DEBUG nova.network.neutron [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Successfully created port: 1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 858.689777] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d101c577-e6e5-453f-b79a-952318df7c52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.699865] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89540fa-322a-4d35-b68c-c81219c9f59c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.735586] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72489638-5b98-49e7-ba4f-d7c99ca028f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.745526] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fd83c8-670c-4f4a-b566-4bf06c782205 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.761829] env[65758]: DEBUG nova.compute.provider_tree [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.849437] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: e4540963-7be9-426e-90f8-b31524d2237b] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 858.920040] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5247b399-61c1-bd4e-3195-d41b3b355011, 'name': SearchDatastore_Task, 'duration_secs': 0.010372} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.920329] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.920589] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 96103549-80a5-462d-9f73-f5f6363ab9fc/96103549-80a5-462d-9f73-f5f6363ab9fc.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.920858] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f444fbdf-9d45-4d65-a7ee-b146241e51ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.928750] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 858.928750] env[65758]: value = "task-4660545" [ 858.928750] env[65758]: _type = "Task" [ 858.928750] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.938778] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660545, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.963216] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660543, 'name': Rename_Task, 'duration_secs': 1.153872} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.963454] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 858.963672] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcc53016-baae-41c0-841f-9676985e9f39 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.972700] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 858.972700] env[65758]: value = "task-4660546" [ 858.972700] env[65758]: _type = "Task" [ 858.972700] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.982171] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660546, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.057164] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74597428-8a73-474a-9c82-c392c7162234 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.078279] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance 'a9ec9a64-94c7-41a5-a7a4-5e034ddfc592' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 859.128976] env[65758]: DEBUG oslo_vmware.api [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660544, 'name': PowerOnVM_Task, 'duration_secs': 1.40576} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.129715] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.130185] env[65758]: INFO nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Took 8.33 seconds to spawn the instance on the hypervisor. 
[ 859.130570] env[65758]: DEBUG nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 859.133374] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1702ce92-6bbc-4f30-93b3-e0496ac5c36a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.265769] env[65758]: DEBUG nova.scheduler.client.report [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.352980] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a0a3efdf-9b2d-4c07-a47d-1ba07c0b4974] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 859.439483] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660545, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.484936] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660546, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.585389] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.585740] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd3532e4-8eda-4b97-a172-d2930b010ea7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.599395] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 859.599395] env[65758]: value = "task-4660547" [ 859.599395] env[65758]: _type = "Task" [ 859.599395] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.607047] env[65758]: DEBUG nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 859.616621] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.638344] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 859.638615] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 859.638773] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 859.638937] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 859.639286] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 859.639450] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 859.639691] env[65758]: DEBUG nova.virt.hardware [None 
req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 859.639845] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 859.640018] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 859.640179] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 859.640342] env[65758]: DEBUG nova.virt.hardware [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 859.641260] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c35284-1342-4b63-91f5-8ff1162bfa72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.656239] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0ab52d-2935-49da-a437-10ef2ec078e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.661353] env[65758]: INFO nova.compute.manager [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Took 60.51 seconds to build instance. [ 859.771106] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.196s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.771676] env[65758]: DEBUG nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 859.775561] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.262s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.777214] env[65758]: INFO nova.compute.claims [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.859720] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 9e16d31b-e84c-448b-9d83-98cac49570a0] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 859.941785] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660545, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523464} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.942121] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 96103549-80a5-462d-9f73-f5f6363ab9fc/96103549-80a5-462d-9f73-f5f6363ab9fc.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.942336] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.943031] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab46afc6-797e-44a4-942b-ef5a704c75cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.951096] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 859.951096] env[65758]: value = "task-4660548" [ 859.951096] env[65758]: _type = "Task" [ 859.951096] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.961904] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660548, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.985345] env[65758]: DEBUG oslo_vmware.api [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660546, 'name': PowerOnVM_Task, 'duration_secs': 0.62594} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.985639] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.985885] env[65758]: INFO nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Took 8.73 seconds to spawn the instance on the hypervisor. [ 859.986084] env[65758]: DEBUG nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 859.986919] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea6ff65-ec8c-4994-b279-dfc0e1cd3f84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.111117] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660547, 'name': PowerOffVM_Task, 'duration_secs': 0.267615} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.111428] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 860.111628] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance 'a9ec9a64-94c7-41a5-a7a4-5e034ddfc592' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 860.163682] env[65758]: DEBUG oslo_concurrency.lockutils [None req-49829798-afd2-4351-98ee-8d8415e3aecc tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "d42d0818-1486-4696-9871-2cf989aeb885" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.222s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.281863] env[65758]: DEBUG nova.compute.utils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 860.285630] env[65758]: DEBUG nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 860.285891] env[65758]: DEBUG nova.network.neutron [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 860.286259] env[65758]: WARNING neutronclient.v2_0.client [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 860.289484] env[65758]: WARNING neutronclient.v2_0.client [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 860.289484] env[65758]: WARNING openstack [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 860.289484] env[65758]: WARNING openstack [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 860.364212] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 549673ec-3d75-4aad-a001-014f3f53a6b0] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 860.463515] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085622} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.463936] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.465291] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c337cc-0eb9-4a9e-af14-5aa71ce4cb5e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.498347] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 96103549-80a5-462d-9f73-f5f6363ab9fc/96103549-80a5-462d-9f73-f5f6363ab9fc.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.504178] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f6d5b10-8730-423e-9a37-7dd13ffe5f89 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.530572] env[65758]: INFO nova.compute.manager [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Took 59.85 seconds to build instance. 
[ 860.534182] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 860.534182] env[65758]: value = "task-4660549" [ 860.534182] env[65758]: _type = "Task" [ 860.534182] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.546227] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660549, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.619750] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 860.620017] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 860.620186] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 860.620397] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 860.620561] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 860.620715] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 860.621029] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 860.621115] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 860.621287] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 860.621457] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 860.621648] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 860.632107] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfaba2de-e2eb-4239-8ba7-0cc4f430ff58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.654041] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 860.654041] env[65758]: value = "task-4660550" [ 860.654041] env[65758]: _type = "Task" [ 860.654041] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.655246] env[65758]: DEBUG nova.network.neutron [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Successfully updated port: 1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 860.670522] env[65758]: DEBUG nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 860.672459] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660550, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.692309] env[65758]: WARNING neutronclient.v2_0.client [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 860.693155] env[65758]: WARNING openstack [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 860.693513] env[65758]: WARNING openstack [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 860.786460] env[65758]: DEBUG nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 860.869452] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 67fdb417-62ea-412c-8b82-868d59149f89] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 860.922175] env[65758]: DEBUG nova.policy [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35ec6fec92574772ba18d61856273108', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bed522365ca465f90708212bdb65510', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 861.037038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a09a01a5-e9ea-4be7-8d74-a1aa204b874e tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.937s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.049756] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660549, 'name': ReconfigVM_Task, 'duration_secs': 0.321303} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.050391] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 96103549-80a5-462d-9f73-f5f6363ab9fc/96103549-80a5-462d-9f73-f5f6363ab9fc.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.050854] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9be1fa27-38e3-4b6f-ad74-aac66ebe07b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.059748] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 861.059748] env[65758]: value = "task-4660551" [ 861.059748] env[65758]: _type = "Task" [ 861.059748] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.070473] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660551, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.158035] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.158228] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.158400] env[65758]: DEBUG nova.network.neutron [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 861.172458] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660550, 'name': ReconfigVM_Task, 'duration_secs': 0.231655} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.172458] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance 'a9ec9a64-94c7-41a5-a7a4-5e034ddfc592' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 861.202086] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.372824] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.373105] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Cleaning up deleted instances with incomplete migration {{(pid=65758) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11954}} [ 861.428363] env[65758]: DEBUG nova.network.neutron [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Updated VIF entry in instance network info cache for port a555e91f-164f-4b04-83dd-828041132dcc. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 861.428725] env[65758]: DEBUG nova.network.neutron [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Updating instance_info_cache with network_info: [{"id": "a555e91f-164f-4b04-83dd-828041132dcc", "address": "fa:16:3e:d3:2a:e3", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa555e91f-16", "ovs_interfaceid": "a555e91f-164f-4b04-83dd-828041132dcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 861.446531] env[65758]: DEBUG nova.network.neutron [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Successfully created port: 31402f5e-3e8a-4ff8-a2b3-4b5992fb142a {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 861.451747] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06c950e-1fcf-434d-8bec-2487e2bbda8f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.461612] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76a58c8-5b58-49c2-9dc1-19624ea8e74f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.498469] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aba551c-b7d2-4408-850a-831efcb079ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.507899] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10283327-621e-46b5-b43f-98d53ed607a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.524617] env[65758]: DEBUG nova.compute.provider_tree [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.543668] env[65758]: DEBUG nova.compute.manager [None 
req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 861.572080] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660551, 'name': Rename_Task, 'duration_secs': 0.158941} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.572464] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.572777] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3743ab33-cee9-4812-878d-3ab5bba1fae9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.582130] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 861.582130] env[65758]: value = "task-4660552" [ 861.582130] env[65758]: _type = "Task" [ 861.582130] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.593503] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660552, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.662767] env[65758]: WARNING openstack [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 861.663228] env[65758]: WARNING openstack [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 861.680446] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 861.680800] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 861.681026] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 861.681255] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 861.682934] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 861.683044] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} 
[ 861.683284] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.683458] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 861.683645] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 861.683833] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 861.684039] env[65758]: DEBUG nova.virt.hardware [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 861.692726] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Reconfiguring VM instance instance-00000033 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 861.693106] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08292d2c-e58d-431e-8d32-d41c57caf4c5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.714912] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 861.714912] env[65758]: value = "task-4660553" [ 861.714912] env[65758]: _type = "Task" [ 861.714912] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.726594] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660553, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.803104] env[65758]: DEBUG nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 861.837684] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 861.837972] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 861.838160] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 861.838409] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 861.838565] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 861.838704] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 861.838905] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.839070] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 861.839271] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 861.839482] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 861.839789] env[65758]: DEBUG nova.virt.hardware [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 861.840964] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e62f31-8731-46a9-b43f-b33f8ff69165 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.851924] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7f5627-ec8a-46e2-9146-5591f5e5cd11 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.877300] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.934654] env[65758]: DEBUG oslo_concurrency.lockutils [req-46db2f9b-18fb-44d9-a660-55687c9b08cb req-ecf58762-465f-4178-b7a2-dc070fd09083 service nova] Releasing lock "refresh_cache-96103549-80a5-462d-9f73-f5f6363ab9fc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.028162] env[65758]: DEBUG nova.scheduler.client.report [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.098186] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] 
Task: {'id': task-4660552, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.100044] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.226772] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660553, 'name': ReconfigVM_Task, 'duration_secs': 0.205681} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.227346] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Reconfigured VM instance instance-00000033 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 862.229433] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18edf64-4f41-447a-80ef-0f7eee6022db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.256449] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] a9ec9a64-94c7-41a5-a7a4-5e034ddfc592/a9ec9a64-94c7-41a5-a7a4-5e034ddfc592.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.256892] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9d85e8e-1364-4808-82b3-623b67b51621 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.277365] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 862.277365] env[65758]: value = "task-4660554" [ 862.277365] env[65758]: _type = "Task" [ 862.277365] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.288490] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660554, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.538179] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.539177] env[65758]: DEBUG nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 862.542922] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.168s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.543367] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.547654] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.738s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.548132] env[65758]: INFO nova.compute.claims [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.586046] env[65758]: INFO nova.scheduler.client.report [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted allocations for instance 9e007d55-0a5c-4469-a546-9b18e188bea0 [ 862.604991] env[65758]: DEBUG oslo_vmware.api [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660552, 'name': PowerOnVM_Task, 'duration_secs': 0.597131} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.605360] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.605622] env[65758]: INFO nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Took 8.66 seconds to spawn the instance on the hypervisor. [ 862.606379] env[65758]: DEBUG nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 862.609347] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da35266e-4b40-4087-bc89-6eef42f01dbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.788453] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660554, 'name': ReconfigVM_Task, 'duration_secs': 0.304728} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.789293] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Reconfigured VM instance instance-00000033 to attach disk [datastore1] a9ec9a64-94c7-41a5-a7a4-5e034ddfc592/a9ec9a64-94c7-41a5-a7a4-5e034ddfc592.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 862.789293] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance 'a9ec9a64-94c7-41a5-a7a4-5e034ddfc592' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 863.040793] env[65758]: DEBUG nova.network.neutron [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 863.055387] env[65758]: DEBUG nova.compute.utils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 863.060932] env[65758]: DEBUG nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 863.060932] env[65758]: DEBUG nova.network.neutron [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 863.060932] env[65758]: WARNING neutronclient.v2_0.client [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 863.061869] env[65758]: WARNING neutronclient.v2_0.client [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
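The neutronclient deprecation warnings above recur throughout this run; the suggested replacement is openstacksdk. A minimal sketch of creating a port through the SDK, assuming a clouds.yaml entry named 'devstack' and a placeholder network name (neither value comes from this log):

    # Hypothetical example: port creation via openstacksdk instead of the
    # deprecated neutronclient bindings; cloud and network names are placeholders.
    import openstack

    conn = openstack.connect(cloud="devstack")      # reads clouds.yaml
    network = conn.network.find_network("private")  # placeholder network name
    if network is not None:
        port = conn.network.create_port(network_id=network.id, name="demo-port")
        print(port.id, port.mac_address)
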
[ 863.062691] env[65758]: WARNING openstack [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 863.063156] env[65758]: WARNING openstack [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 863.105453] env[65758]: DEBUG oslo_concurrency.lockutils [None req-39783a45-ba0c-4801-843d-e257945f52fb tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "9e007d55-0a5c-4469-a546-9b18e188bea0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.482s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.127293] env[65758]: DEBUG nova.network.neutron [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Successfully updated port: 31402f5e-3e8a-4ff8-a2b3-4b5992fb142a {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 863.137174] env[65758]: INFO nova.compute.manager [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Took 59.56 seconds to build instance. [ 863.155511] env[65758]: DEBUG nova.policy [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35ec6fec92574772ba18d61856273108', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bed522365ca465f90708212bdb65510', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 863.196066] env[65758]: WARNING neutronclient.v2_0.client [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
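The "Disabling service" warnings above come from openstacksdk reading options that were never registered in the [cinder] and [barbican] groups of the running config. A small illustration of the underlying oslo.config behaviour (group and option names mirror the log; the snippet itself is illustrative only, not the SDK's code path):

    # Reading an unregistered option raises NoSuchOptError, which is what the
    # warnings above wrap.
    from oslo_config import cfg

    CONF = cfg.ConfigOpts()
    CONF.register_group(cfg.OptGroup("cinder"))
    try:
        CONF.cinder.valid_interfaces            # option not registered yet
    except cfg.NoSuchOptError as exc:
        print("unregistered:", exc)

    CONF.register_opts(
        [cfg.ListOpt("valid_interfaces", default=["internal", "public"])],
        group="cinder",
    )
    print(CONF.cinder.valid_interfaces)         # now resolves to the default
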
[ 863.196730] env[65758]: WARNING openstack [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 863.197197] env[65758]: WARNING openstack [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 863.296476] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f342468-41c3-4ad3-bc67-cca555664c1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.318470] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5aecd0-4ed4-45a6-b722-6e4d90a0bcb9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.339618] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance 'a9ec9a64-94c7-41a5-a7a4-5e034ddfc592' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 863.365000] env[65758]: DEBUG nova.network.neutron [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updating instance_info_cache with network_info: [{"id": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "address": "fa:16:3e:f0:a2:8e", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb28e5b-cb", "ovs_interfaceid": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 863.447531] env[65758]: DEBUG nova.network.neutron [None req-4761035d-823f-462a-b188-89c4e9db32a1 
tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Successfully created port: 891c589c-0854-41a8-8eb2-e06ac9124837 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 863.540690] env[65758]: DEBUG nova.compute.manager [req-e3a0bc09-037c-4c7b-b290-20802a204fee req-6546ecf8-ec78-4d2e-bab4-c6fc077f6346 service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Received event network-vif-plugged-1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 863.541811] env[65758]: DEBUG oslo_concurrency.lockutils [req-e3a0bc09-037c-4c7b-b290-20802a204fee req-6546ecf8-ec78-4d2e-bab4-c6fc077f6346 service nova] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.542101] env[65758]: DEBUG oslo_concurrency.lockutils [req-e3a0bc09-037c-4c7b-b290-20802a204fee req-6546ecf8-ec78-4d2e-bab4-c6fc077f6346 service nova] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.542271] env[65758]: DEBUG oslo_concurrency.lockutils [req-e3a0bc09-037c-4c7b-b290-20802a204fee req-6546ecf8-ec78-4d2e-bab4-c6fc077f6346 service nova] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.542433] env[65758]: DEBUG nova.compute.manager [req-e3a0bc09-037c-4c7b-b290-20802a204fee req-6546ecf8-ec78-4d2e-bab4-c6fc077f6346 service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] No waiting events found dispatching network-vif-plugged-1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 863.543447] env[65758]: WARNING nova.compute.manager [req-e3a0bc09-037c-4c7b-b290-20802a204fee req-6546ecf8-ec78-4d2e-bab4-c6fc077f6346 service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Received unexpected event network-vif-plugged-1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea for instance with vm_state building and task_state spawning. [ 863.560998] env[65758]: DEBUG nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 863.636595] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.636791] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.637032] env[65758]: DEBUG nova.network.neutron [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 863.638989] env[65758]: DEBUG oslo_concurrency.lockutils [None req-667cf179-2690-44b5-9413-a0453c4f127e tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "96103549-80a5-462d-9f73-f5f6363ab9fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.746s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.851992] env[65758]: WARNING neutronclient.v2_0.client [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 863.852893] env[65758]: WARNING neutronclient.v2_0.client [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
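The lock bookkeeping above ("Acquiring", "acquired ... waited", "released ... held") is emitted by oslo.concurrency. A minimal sketch of the two usual ways such a named lock is taken; the lock name mirrors the log, the function is hypothetical:

    # Hedged sketch of the oslo.concurrency lock pattern seen in these entries.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid):
        # runs with the named lock held; wait/held times are logged at DEBUG
        return instance_uuid

    claim_resources("454bd092-f683-4a3a-91c9-65191d6996f4")

    # the same named lock can also be taken explicitly as a context manager
    with lockutils.lock("compute_resources"):
        pass  # critical section
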
[ 863.868294] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Releasing lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.868687] env[65758]: DEBUG nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Instance network_info: |[{"id": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "address": "fa:16:3e:f0:a2:8e", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb28e5b-cb", "ovs_interfaceid": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 863.870407] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:a2:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.878936] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Creating folder: Project (16188c7bd36d4b0eaffdc980b71ac727). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.881815] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-699a92b1-8830-48dc-b407-301da6c7ba88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.895051] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Created folder: Project (16188c7bd36d4b0eaffdc980b71ac727) in parent group-v909763. 
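Folder creation and the various *_Task invocations above go through oslo.vmware. A rough sketch of that call pattern, assuming placeholder vCenter credentials and managed object references (none of these values are taken from this deployment):

    # Hedged sketch of driving vSphere calls with oslo.vmware; host, credentials
    # and object references are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        "vcenter.example.com", "user", "secret",
        api_retry_count=10, task_poll_interval=0.5,
    )
    parent_ref = None  # would be a Folder managed object ref such as group-v909763
    folder_ref = session.invoke_api(
        session.vim, "CreateFolder", parent_ref, name="Instances",
    )
    # Methods ending in _Task return a task reference; wait_for_task polls it,
    # producing the "progress is N%" / "completed successfully" lines above:
    # task = session.invoke_api(session.vim, "ReconfigVM_Task", vm_ref, spec=spec)
    # session.wait_for_task(task)
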
[ 863.895051] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Creating folder: Instances. Parent ref: group-v909933. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.895334] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba945cd4-268b-40df-8f0c-303515c642da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.908518] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Created folder: Instances in parent group-v909933. [ 863.908804] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 863.909068] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.909323] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-616792f3-f0cc-4336-932f-bf8b4af18561 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.934932] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.934932] env[65758]: value = "task-4660557" [ 863.934932] env[65758]: _type = "Task" [ 863.934932] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.944021] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660557, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.951010] env[65758]: DEBUG nova.network.neutron [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Port e31ffc86-5e08-405f-8129-6af1973003bf binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 864.141047] env[65758]: WARNING openstack [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 864.142781] env[65758]: WARNING openstack [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 864.159022] env[65758]: DEBUG nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 864.186952] env[65758]: DEBUG nova.compute.manager [req-d9e2ee78-b905-45fd-adc4-8be061bbf0d5 req-efc0a9c9-8e0e-4d88-81cb-41fc65a62f37 service nova] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Received event network-vif-plugged-31402f5e-3e8a-4ff8-a2b3-4b5992fb142a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 864.186952] env[65758]: DEBUG oslo_concurrency.lockutils [req-d9e2ee78-b905-45fd-adc4-8be061bbf0d5 req-efc0a9c9-8e0e-4d88-81cb-41fc65a62f37 service nova] Acquiring lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.186952] env[65758]: DEBUG oslo_concurrency.lockutils [req-d9e2ee78-b905-45fd-adc4-8be061bbf0d5 req-efc0a9c9-8e0e-4d88-81cb-41fc65a62f37 service nova] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.186952] env[65758]: DEBUG oslo_concurrency.lockutils [req-d9e2ee78-b905-45fd-adc4-8be061bbf0d5 req-efc0a9c9-8e0e-4d88-81cb-41fc65a62f37 service nova] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.186952] env[65758]: DEBUG nova.compute.manager [req-d9e2ee78-b905-45fd-adc4-8be061bbf0d5 req-efc0a9c9-8e0e-4d88-81cb-41fc65a62f37 service nova] 
[instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] No waiting events found dispatching network-vif-plugged-31402f5e-3e8a-4ff8-a2b3-4b5992fb142a {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 864.186952] env[65758]: WARNING nova.compute.manager [req-d9e2ee78-b905-45fd-adc4-8be061bbf0d5 req-efc0a9c9-8e0e-4d88-81cb-41fc65a62f37 service nova] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Received unexpected event network-vif-plugged-31402f5e-3e8a-4ff8-a2b3-4b5992fb142a for instance with vm_state building and task_state spawning. [ 864.198548] env[65758]: DEBUG nova.network.neutron [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 864.254744] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505ff7bc-d5d2-4e1c-9aad-e575a719f6d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.267536] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9c7468-beac-45c8-a681-93740e541b7f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.305427] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3bf069-8bc7-4b10-a135-758da3ebc251 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.316026] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9167ea4-7c26-49ec-83af-fc5ef7d247dc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.322018] env[65758]: WARNING neutronclient.v2_0.client [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
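The "Received unexpected event network-vif-plugged-..." warnings above mean the external event from Neutron arrived before any thread had registered to wait for it. A toy illustration of that race (explicitly not Nova's implementation):

    # Toy illustration only: an event can arrive before a waiter has been
    # registered, so there is nothing to dispatch it to.
    import threading

    waiting = {}  # event name -> threading.Event

    def prepare_to_wait(name):
        waiting[name] = threading.Event()

    def external_event_received(name):
        ev = waiting.get(name)
        if ev is None:
            print(f"No waiting events found dispatching {name}")
        else:
            ev.set()

    prepare_to_wait("network-vif-plugged-31402f5e")
    external_event_received("network-vif-plugged-31402f5e")  # dispatched
    external_event_received("network-vif-plugged-unknown")   # nothing waiting
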
[ 864.322018] env[65758]: WARNING openstack [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 864.322018] env[65758]: WARNING openstack [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 864.341403] env[65758]: DEBUG nova.compute.provider_tree [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.446427] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660557, 'name': CreateVM_Task, 'duration_secs': 0.367612} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.446658] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.447254] env[65758]: WARNING neutronclient.v2_0.client [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
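The "Inventory has not changed for provider ..." entries above indicate the scheduler report client skipped an update because the newly computed inventory matched its cache. A simplified illustration of that comparison, reusing the figures from this log (not the actual report-client code):

    # Simplified illustration of the "skip update when inventory is unchanged"
    # check; inventory values mirror the log, the function is hypothetical.
    PROVIDER = "0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51"
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "min_unit": 1, "max_unit": 95,
                    "step_size": 1, "allocation_ratio": 1.0},
    }
    cache = {PROVIDER: dict(inventory)}

    def set_inventory_for_provider(uuid, new_inventory):
        if cache.get(uuid) == new_inventory:
            print(f"Inventory has not changed for provider {uuid}")
            return False
        cache[uuid] = new_inventory
        # a real client would now PUT the inventory to the Placement API
        return True

    set_inventory_for_provider(PROVIDER, inventory)
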
[ 864.447631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.447779] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.448134] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 864.448440] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42f1d302-bcbd-4039-8f6a-481e52f15101 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.458726] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 864.458726] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5277ba19-04c5-4211-cabd-b3fb4365799b" [ 864.458726] env[65758]: _type = "Task" [ 864.458726] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.469648] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5277ba19-04c5-4211-cabd-b3fb4365799b, 'name': SearchDatastore_Task, 'duration_secs': 0.011992} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.469984] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.469984] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.470195] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.470267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.470480] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.470723] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a68f01a2-d5b0-4211-bcd5-ca88bf2fb002 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.481788] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.482013] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.485041] env[65758]: DEBUG nova.network.neutron [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Updating instance_info_cache with network_info: [{"id": "31402f5e-3e8a-4ff8-a2b3-4b5992fb142a", "address": "fa:16:3e:ec:b2:6e", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31402f5e-3e", "ovs_interfaceid": "31402f5e-3e8a-4ff8-a2b3-4b5992fb142a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 864.486344] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e939cf3d-2f79-4b26-b8aa-ab15c0c98568 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.496489] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 864.496489] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ca340e-afc6-7eae-708e-614b6eff620a" [ 864.496489] env[65758]: _type = "Task" [ 864.496489] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.506763] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ca340e-afc6-7eae-708e-614b6eff620a, 'name': SearchDatastore_Task, 'duration_secs': 0.010384} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.507615] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc45d0fa-45ee-4d18-b49e-a296ca59fe83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.516121] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 864.516121] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5268fd4f-32cf-5b4c-fe68-1a3ba5f33ae1" [ 864.516121] env[65758]: _type = "Task" [ 864.516121] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.527206] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5268fd4f-32cf-5b4c-fe68-1a3ba5f33ae1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.580657] env[65758]: DEBUG nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 864.604170] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 864.604440] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 864.604593] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 864.604770] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 
tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 864.604911] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 864.605064] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 864.605276] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.605429] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 864.605617] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 864.605739] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 864.605910] env[65758]: DEBUG nova.virt.hardware [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 864.606796] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cef09d-a72e-4404-9f27-d5afc6e86d2d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.616789] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5752b0b0-08df-4e6f-94f7-7a20c0256d4d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.682332] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.806164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "12c27fac-98e9-486d-bf36-0580a4e0a163" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.806272] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "12c27fac-98e9-486d-bf36-0580a4e0a163" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.844923] env[65758]: DEBUG nova.scheduler.client.report [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.977340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.979766] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.979766] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.989593] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.990719] env[65758]: DEBUG nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Instance network_info: |[{"id": "31402f5e-3e8a-4ff8-a2b3-4b5992fb142a", "address": "fa:16:3e:ec:b2:6e", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31402f5e-3e", "ovs_interfaceid": "31402f5e-3e8a-4ff8-a2b3-4b5992fb142a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 864.990981] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:b2:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49b5df12-d801-4140-8816-2fd401608c7d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31402f5e-3e8a-4ff8-a2b3-4b5992fb142a', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.002638] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Creating folder: Project (5bed522365ca465f90708212bdb65510). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.004204] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9905e10a-962b-4617-b2fa-cd66a22d1cac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.018309] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Created folder: Project (5bed522365ca465f90708212bdb65510) in parent group-v909763. [ 865.019260] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Creating folder: Instances. Parent ref: group-v909936. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.023714] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6ba300d-8fb9-467f-a7a0-4dc3df449baf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.035805] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5268fd4f-32cf-5b4c-fe68-1a3ba5f33ae1, 'name': SearchDatastore_Task, 'duration_secs': 0.012452} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.037310] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.037584] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 7f5911fb-785e-444c-9408-c6884e06c5d3/7f5911fb-785e-444c-9408-c6884e06c5d3.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.037866] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Created folder: Instances in parent group-v909936. [ 865.038094] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 865.038330] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d6b4557-6df2-41be-9a3f-ea4ae7139fae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.040737] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.042741] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8739e535-0765-42dc-acfe-db21d2da391d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.063940] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 865.063940] env[65758]: value = "task-4660560" [ 865.063940] env[65758]: _type = "Task" [ 865.063940] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.065475] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.065475] env[65758]: value = "task-4660561" [ 865.065475] env[65758]: _type = "Task" [ 865.065475] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.084024] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660560, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.084024] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660561, 'name': CreateVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.135496] env[65758]: DEBUG nova.network.neutron [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Successfully updated port: 891c589c-0854-41a8-8eb2-e06ac9124837 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 865.355358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.809s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.357216] env[65758]: DEBUG nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 865.360753] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock "interface-85082b72-89dd-47b7-b8ad-f2ad5ad0638d-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.360980] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "interface-85082b72-89dd-47b7-b8ad-f2ad5ad0638d-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.364215] env[65758]: DEBUG nova.objects.instance [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lazy-loading 'flavor' on Instance uuid 85082b72-89dd-47b7-b8ad-f2ad5ad0638d {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.364215] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.132s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.364215] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.367550] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.382s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.367965] env[65758]: INFO nova.compute.claims [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.401541] env[65758]: INFO nova.scheduler.client.report [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Deleted allocations for instance 596a5005-3607-44a2-9c0e-f1a56865011c [ 865.458254] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03237864-2235-433d-8d80-e89090fc2a4f {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.259877] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "refresh_cache-454bd092-f683-4a3a-91c9-65191d6996f4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.260247] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "refresh_cache-454bd092-f683-4a3a-91c9-65191d6996f4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.260247] env[65758]: DEBUG nova.network.neutron [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 866.263029] env[65758]: WARNING neutronclient.v2_0.client [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 866.263350] env[65758]: WARNING openstack [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 866.263699] env[65758]: WARNING openstack [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 866.268647] env[65758]: DEBUG nova.objects.instance [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lazy-loading 'pci_requests' on Instance uuid 85082b72-89dd-47b7-b8ad-f2ad5ad0638d {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.270094] env[65758]: DEBUG nova.compute.utils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 866.279766] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a832ab-c341-4e71-8d09-fdfaa69eb311 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Suspending the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 866.280370] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-62d28458-1012-4e57-be5e-be6cc0b1036d tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "596a5005-3607-44a2-9c0e-f1a56865011c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.132s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.288973] env[65758]: DEBUG nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 866.289299] env[65758]: DEBUG nova.network.neutron [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 866.289628] env[65758]: WARNING neutronclient.v2_0.client [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 866.289919] env[65758]: WARNING neutronclient.v2_0.client [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
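The "Acquiring lock ... waited ... / acquired ... / released ... held ..." DEBUG lines throughout this section come from oslo.concurrency's lockutils wrapping each critical section. A minimal sketch of the same pattern, assuming oslo.concurrency is installed; the lock name mirrors the "compute_resources" lock in the log and the function body is purely illustrative, not Nova's resource tracker code:

import time
from oslo_concurrency import lockutils

def claim_resources(instance_uuid):
    start = time.monotonic()
    with lockutils.lock("compute_resources"):
        # Time spent blocked corresponds to the "waited N.NNNs" part of the log lines.
        waited = time.monotonic() - start
        # Resource-tracker style accounting would happen here while the lock is held.
        print("lock acquired after waiting %.3fs for %s" % (waited, instance_uuid))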
[ 866.290480] env[65758]: WARNING openstack [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 866.290819] env[65758]: WARNING openstack [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 866.297067] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquiring lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.297285] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.297495] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "be3de9bd-da98-4c7e-ad7c-933245523695" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.297663] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "be3de9bd-da98-4c7e-ad7c-933245523695" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.297850] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-64b727bf-94c4-4379-8ec8-ec7b9b195f95 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.313898] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660560, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525605} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.314381] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660561, 'name': CreateVM_Task, 'duration_secs': 0.430997} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.317181] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 7f5911fb-785e-444c-9408-c6884e06c5d3/7f5911fb-785e-444c-9408-c6884e06c5d3.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.317417] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.317624] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.318214] env[65758]: DEBUG oslo_vmware.api [None req-e1a832ab-c341-4e71-8d09-fdfaa69eb311 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 866.318214] env[65758]: value = "task-4660562" [ 866.318214] env[65758]: _type = "Task" [ 866.318214] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.319204] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b9383b3-24c3-4ef8-9133-597a24b5414d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.323701] env[65758]: WARNING neutronclient.v2_0.client [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
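The "Waiting for the task: (returnval){ ... }" and "Task: {'id': task-..., ...} progress is N%" entries reflect the client-side polling that oslo.vmware performs on vCenter tasks. A rough, hypothetical illustration of such a loop; get_task_info and its .state/.progress attributes are stand-ins for vCenter's TaskInfo, not oslo.vmware's actual internals:

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    # Poll the vCenter task until it reaches a terminal state, the way the
    # _poll_task DEBUG lines above fire once per poll interval.
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError("task failed: %s" % getattr(info, "error", None))
        time.sleep(poll_interval)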
[ 866.324120] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.324269] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.324577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 866.329026] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cc54042-ad2b-4dcb-a554-ac175e9eb557 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.338170] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 866.338170] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a807bf-5e85-3de0-43a0-2fa970b0bfff" [ 866.338170] env[65758]: _type = "Task" [ 866.338170] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.343275] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 866.343275] env[65758]: value = "task-4660563" [ 866.343275] env[65758]: _type = "Task" [ 866.343275] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.343534] env[65758]: DEBUG oslo_vmware.api [None req-e1a832ab-c341-4e71-8d09-fdfaa69eb311 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660562, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.357207] env[65758]: DEBUG nova.policy [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35ec6fec92574772ba18d61856273108', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bed522365ca465f90708212bdb65510', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 866.366792] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660563, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.367371] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a807bf-5e85-3de0-43a0-2fa970b0bfff, 'name': SearchDatastore_Task, 'duration_secs': 0.011919} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.367633] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.367917] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.368179] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.368319] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.368491] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 
tempest-ListServerFiltersTestJSON-514582576-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.368977] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bea8ee9c-3015-4988-8eb5-705134f3a37f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.380054] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.380305] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.381508] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40ddd654-05cd-428c-b3ad-449991d7e728 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.388693] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 866.388693] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527ff73b-7e57-dab4-b24a-2d4f5f198c5d" [ 866.388693] env[65758]: _type = "Task" [ 866.388693] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.399584] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527ff73b-7e57-dab4-b24a-2d4f5f198c5d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.683325] env[65758]: DEBUG nova.network.neutron [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Successfully created port: 328056a5-b991-4a04-8444-c1de0afdf0ab {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 866.757294] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.757590] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.757917] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.758220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.758349] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.761467] env[65758]: INFO nova.compute.manager [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Terminating instance [ 866.783641] env[65758]: WARNING neutronclient.v2_0.client [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
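The repeated "python binding code in neutronclient is deprecated in favor of OpenstackSDK" warnings point at the SDK as the replacement for calls such as the port creation logged above ("Successfully created port: 328056a5-..."). A hedged sketch of an equivalent SDK call, assuming openstacksdk is installed and a clouds.yaml entry named "devstack" exists; the network id is the tempest network from this log, while the cloud and port names are made up for illustration:

import openstack

conn = openstack.connect(cloud="devstack")
port = conn.network.create_port(
    network_id="b8020aea-ddd1-4c96-b0aa-7114e6e3af1d",  # tempest network id seen above
    name="example-port",                                # illustrative name only
)
print(port.id)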
[ 866.789996] env[65758]: WARNING openstack [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 866.791013] env[65758]: WARNING openstack [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 866.810286] env[65758]: DEBUG nova.objects.base [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Object Instance<85082b72-89dd-47b7-b8ad-f2ad5ad0638d> lazy-loaded attributes: flavor,pci_requests {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 866.810556] env[65758]: DEBUG nova.network.neutron [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 866.810971] env[65758]: WARNING neutronclient.v2_0.client [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 866.811365] env[65758]: WARNING neutronclient.v2_0.client [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
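The "Disabling service 'block-storage' ... NoSuchOptError: no such option valid_interfaces in group [cinder]" warnings are openstacksdk reading Nova's config groups and asking oslo.config for an option that was never registered there. A tiny reproduction sketch of that oslo.config behaviour, with the group and option names chosen to match the warning text:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup("cinder"))
try:
    _ = conf.cinder.valid_interfaces  # the option was never registered in this group
except cfg.NoSuchOptError as exc:
    print(exc)  # "no such option valid_interfaces in group [cinder]"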
[ 866.813863] env[65758]: WARNING openstack [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 866.813863] env[65758]: WARNING openstack [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 866.838060] env[65758]: DEBUG nova.compute.manager [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Received event network-changed-1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 866.838060] env[65758]: DEBUG nova.compute.manager [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Refreshing instance network info cache due to event network-changed-1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 866.838060] env[65758]: DEBUG oslo_concurrency.lockutils [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] Acquiring lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.838060] env[65758]: DEBUG oslo_concurrency.lockutils [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] Acquired lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.838060] env[65758]: DEBUG nova.network.neutron [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Refreshing network info cache for port 1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 866.838060] env[65758]: DEBUG nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 866.857577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.857789] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.858095] env[65758]: DEBUG nova.network.neutron [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 866.866501] env[65758]: DEBUG oslo_vmware.api [None req-e1a832ab-c341-4e71-8d09-fdfaa69eb311 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660562, 'name': SuspendVM_Task} progress is 91%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.874253] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660563, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078873} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.874605] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.876399] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bee81a-75e5-4038-8221-a8fca31db7a8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.919610] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 7f5911fb-785e-444c-9408-c6884e06c5d3/7f5911fb-785e-444c-9408-c6884e06c5d3.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.925675] env[65758]: DEBUG nova.network.neutron [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 866.934221] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48f16b12-0cc7-4227-b896-ba3fe6b12702 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.977627] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527ff73b-7e57-dab4-b24a-2d4f5f198c5d, 'name': SearchDatastore_Task, 'duration_secs': 0.012283} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.978025] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 866.978025] env[65758]: value = "task-4660564" [ 866.978025] env[65758]: _type = "Task" [ 866.978025] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.983067] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98ee7946-ced8-4b0e-be14-a106f6603142 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.000931] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 867.000931] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f83cc9-84e9-43db-9159-d5bceb6e8d6d" [ 867.000931] env[65758]: _type = "Task" [ 867.000931] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.009140] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660564, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.023186] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f83cc9-84e9-43db-9159-d5bceb6e8d6d, 'name': SearchDatastore_Task, 'duration_secs': 0.012132} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.026885] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.027344] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3/3a7d0c08-9de6-47f4-a0c3-871458ccc4e3.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.028577] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80c64a10-06eb-4589-95e4-68ddb018f312 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.039297] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 867.039297] env[65758]: value = "task-4660565" [ 867.039297] env[65758]: _type = "Task" [ 867.039297] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.053709] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.112449] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e60469dc-7e68-4cbb-aec7-75948f554998 tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "interface-85082b72-89dd-47b7-b8ad-f2ad5ad0638d-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.751s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.185305] env[65758]: DEBUG nova.compute.manager [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Received event network-changed-31402f5e-3e8a-4ff8-a2b3-4b5992fb142a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 867.185713] env[65758]: DEBUG nova.compute.manager [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Refreshing instance network info cache due to event network-changed-31402f5e-3e8a-4ff8-a2b3-4b5992fb142a. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 867.185826] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Acquiring lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.185860] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Acquired lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.186024] env[65758]: DEBUG nova.network.neutron [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Refreshing network info cache for port 31402f5e-3e8a-4ff8-a2b3-4b5992fb142a {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 867.206937] env[65758]: WARNING neutronclient.v2_0.client [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 867.207722] env[65758]: WARNING openstack [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.208112] env[65758]: WARNING openstack [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.269990] env[65758]: DEBUG nova.compute.manager [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 867.270461] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 867.278446] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2067166-fdfc-444c-abf3-fd0de076d944 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.289844] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 867.290042] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c5ff150-5764-41f3-bf80-ddeefb349b6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.302462] env[65758]: DEBUG oslo_vmware.api [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 867.302462] env[65758]: value = "task-4660566" [ 867.302462] env[65758]: _type = "Task" [ 867.302462] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.320133] env[65758]: DEBUG oslo_vmware.api [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660566, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.351042] env[65758]: WARNING neutronclient.v2_0.client [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
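The "Powering off the VM" / "Invoking VirtualMachine.PowerOffVM_Task" / wait_for_task sequence above has the usual shape of a vCenter operation through oslo.vmware: invoke the SOAP method to obtain a task, then poll that task to completion. A hedged sketch, assuming an already-constructed oslo_vmware.api.VMwareAPISession and a VM managed-object reference obtained elsewhere; this is an outline, not the driver's actual power-off routine:

def power_off_vm(session, vm_ref):
    # session.invoke_api issues the SOAP call logged as
    # "Invoking VirtualMachine.PowerOffVM_Task"; wait_for_task then polls the
    # returned task until it completes, as the progress lines show.
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)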
[ 867.351042] env[65758]: WARNING openstack [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.351415] env[65758]: WARNING openstack [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.368950] env[65758]: WARNING neutronclient.v2_0.client [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 867.369898] env[65758]: WARNING openstack [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.370514] env[65758]: WARNING openstack [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.379336] env[65758]: DEBUG oslo_vmware.api [None req-e1a832ab-c341-4e71-8d09-fdfaa69eb311 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660562, 'name': SuspendVM_Task, 'duration_secs': 0.950967} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.384380] env[65758]: DEBUG nova.network.neutron [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Updating instance_info_cache with network_info: [{"id": "891c589c-0854-41a8-8eb2-e06ac9124837", "address": "fa:16:3e:93:38:50", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap891c589c-08", "ovs_interfaceid": "891c589c-0854-41a8-8eb2-e06ac9124837", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 867.386957] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a832ab-c341-4e71-8d09-fdfaa69eb311 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Suspended the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 867.387691] env[65758]: DEBUG nova.compute.manager [None req-e1a832ab-c341-4e71-8d09-fdfaa69eb311 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 867.388810] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1612c498-182a-4292-ba6a-dd84721cfd1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.500383] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660564, 'name': ReconfigVM_Task, 'duration_secs': 0.361252} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.500734] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 7f5911fb-785e-444c-9408-c6884e06c5d3/7f5911fb-785e-444c-9408-c6884e06c5d3.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.504491] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c3356c2-c684-4103-82f2-7eff5562972d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.514674] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 867.514674] env[65758]: value = "task-4660567" [ 867.514674] env[65758]: _type = "Task" [ 867.514674] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.530858] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660567, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.551219] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660565, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.684455] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a1b911-63be-4c0e-9185-cb2508ca1774 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.690859] env[65758]: WARNING neutronclient.v2_0.client [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 867.691540] env[65758]: WARNING openstack [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.691896] env[65758]: WARNING openstack [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.706278] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85df6b50-a56f-45a3-9862-2fe037cc9518 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.744795] env[65758]: WARNING neutronclient.v2_0.client [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 867.745524] env[65758]: WARNING openstack [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.745883] env[65758]: WARNING openstack [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.754359] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa599ea-ac57-4915-8701-d153904abb5e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.764141] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd5cd75-3d5d-4472-9dd9-9d69b60d1897 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.779709] env[65758]: DEBUG nova.compute.provider_tree [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.785293] env[65758]: WARNING neutronclient.v2_0.client [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 867.785890] env[65758]: WARNING openstack [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 867.786320] env[65758]: WARNING openstack [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 867.813921] env[65758]: DEBUG oslo_vmware.api [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660566, 'name': PowerOffVM_Task, 'duration_secs': 0.41699} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.814252] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 867.814691] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 867.817355] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c45c3d0-1d52-4b2c-b080-c1223777daa4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.860825] env[65758]: DEBUG nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 867.886680] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 867.886949] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 867.887118] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 867.887306] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 867.887444] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 867.887587] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 867.887789] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 867.887944] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 867.888139] 
env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 867.888301] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 867.888462] env[65758]: DEBUG nova.virt.hardware [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 867.889127] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "refresh_cache-454bd092-f683-4a3a-91c9-65191d6996f4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.889493] env[65758]: DEBUG nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Instance network_info: |[{"id": "891c589c-0854-41a8-8eb2-e06ac9124837", "address": "fa:16:3e:93:38:50", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap891c589c-08", "ovs_interfaceid": "891c589c-0854-41a8-8eb2-e06ac9124837", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 867.890396] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eecc563e-736d-4af2-abe3-03c54ac6b1ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.898437] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:93:38:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49b5df12-d801-4140-8816-2fd401608c7d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '891c589c-0854-41a8-8eb2-e06ac9124837', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.906416] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 867.906685] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 867.906875] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 867.907103] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleting the datastore file [datastore2] 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 867.910381] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.911109] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3099e81-2104-4d27-94c1-8351466f9ffd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.914117] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-889f092a-b545-4560-a4b8-39cedc651d51 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.938438] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e096d6b9-9df8-4664-b9ed-e5e0b8708dce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.944424] env[65758]: DEBUG oslo_vmware.api [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 867.944424] env[65758]: value = "task-4660569" [ 867.944424] env[65758]: _type = "Task" [ 867.944424] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.946224] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.946224] env[65758]: value = "task-4660570" [ 867.946224] env[65758]: _type = "Task" [ 867.946224] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.972191] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660570, 'name': CreateVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.974989] env[65758]: DEBUG oslo_vmware.api [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660569, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.028072] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660567, 'name': Rename_Task, 'duration_secs': 0.2364} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.028398] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.028723] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e60200b-d9c2-44c0-8556-26608634311b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.036204] env[65758]: DEBUG nova.network.neutron [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updated VIF entry in instance network info cache for port 1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 868.036204] env[65758]: DEBUG nova.network.neutron [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updating instance_info_cache with network_info: [{"id": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "address": "fa:16:3e:f0:a2:8e", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb28e5b-cb", "ovs_interfaceid": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 868.038921] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 868.038921] env[65758]: value = "task-4660571" [ 868.038921] env[65758]: _type = "Task" [ 868.038921] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.054627] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660571, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.057823] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660565, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549258} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.058123] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3/3a7d0c08-9de6-47f4-a0c3-871458ccc4e3.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.058366] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.058900] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a23fb3a6-002a-44ef-827e-6b5d7787f432 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.067882] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 868.067882] env[65758]: value = "task-4660572" [ 868.067882] env[65758]: _type = "Task" [ 868.067882] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.074977] env[65758]: DEBUG nova.network.neutron [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance_info_cache with network_info: [{"id": "e31ffc86-5e08-405f-8129-6af1973003bf", "address": "fa:16:3e:88:0c:68", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31ffc86-5e", "ovs_interfaceid": "e31ffc86-5e08-405f-8129-6af1973003bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 868.085047] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': 
task-4660572, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.181926] env[65758]: WARNING neutronclient.v2_0.client [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 868.182701] env[65758]: WARNING openstack [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 868.183149] env[65758]: WARNING openstack [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 868.284072] env[65758]: DEBUG nova.scheduler.client.report [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 868.336224] env[65758]: DEBUG nova.network.neutron [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Successfully updated port: 328056a5-b991-4a04-8444-c1de0afdf0ab {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 868.428879] env[65758]: DEBUG nova.network.neutron [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Updated VIF entry in instance network info cache for port 31402f5e-3e8a-4ff8-a2b3-4b5992fb142a. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 868.429361] env[65758]: DEBUG nova.network.neutron [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Updating instance_info_cache with network_info: [{"id": "31402f5e-3e8a-4ff8-a2b3-4b5992fb142a", "address": "fa:16:3e:ec:b2:6e", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31402f5e-3e", "ovs_interfaceid": "31402f5e-3e8a-4ff8-a2b3-4b5992fb142a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 868.466869] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660570, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.471024] env[65758]: DEBUG oslo_vmware.api [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660569, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155254} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.472022] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 868.472022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 868.472022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 868.472022] env[65758]: INFO nova.compute.manager [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Took 1.20 seconds to destroy the instance on the hypervisor. [ 868.472185] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 868.472331] env[65758]: DEBUG nova.compute.manager [-] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 868.472425] env[65758]: DEBUG nova.network.neutron [-] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 868.472952] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 868.473882] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 868.473956] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 868.539166] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 868.541518] env[65758]: DEBUG oslo_concurrency.lockutils [req-35c4c66f-e976-401a-9018-27e3bb7590a0 req-c74a0b06-898c-4514-b470-ccf3617c38f1 service nova] Releasing lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.552253] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660571, 'name': PowerOnVM_Task} progress is 87%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.579202] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.583534] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069865} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.584734] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 868.586072] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6523d414-6baf-4a3c-a0d3-0831fc51f1b8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.615696] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3/3a7d0c08-9de6-47f4-a0c3-871458ccc4e3.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.616122] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09d6d512-aac2-477d-b08b-8f27a789b484 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.639512] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 868.639512] env[65758]: value = "task-4660573" [ 868.639512] env[65758]: _type = "Task" [ 868.639512] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.651143] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660573, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.789733] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.423s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.789733] env[65758]: DEBUG nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 868.794387] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.832s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.794387] env[65758]: DEBUG nova.objects.instance [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lazy-loading 'resources' on Instance uuid a662eac8-07e2-47f1-a4dd-9abbe824817d {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.840042] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "refresh_cache-31816c0c-d7d2-48db-9a87-a1e03c938a60" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.840042] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "refresh_cache-31816c0c-d7d2-48db-9a87-a1e03c938a60" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.840163] env[65758]: DEBUG nova.network.neutron [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 868.933399] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Releasing lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.934311] env[65758]: DEBUG nova.compute.manager [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Received event network-vif-plugged-891c589c-0854-41a8-8eb2-e06ac9124837 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 868.934608] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Acquiring lock "454bd092-f683-4a3a-91c9-65191d6996f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.935410] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Lock "454bd092-f683-4a3a-91c9-65191d6996f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.935410] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Lock "454bd092-f683-4a3a-91c9-65191d6996f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.935410] env[65758]: DEBUG nova.compute.manager [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] No waiting events found dispatching network-vif-plugged-891c589c-0854-41a8-8eb2-e06ac9124837 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 868.935410] env[65758]: WARNING nova.compute.manager [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Received unexpected event network-vif-plugged-891c589c-0854-41a8-8eb2-e06ac9124837 for instance with vm_state building and task_state spawning. [ 868.937885] env[65758]: DEBUG nova.compute.manager [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Received event network-changed-891c589c-0854-41a8-8eb2-e06ac9124837 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 868.937885] env[65758]: DEBUG nova.compute.manager [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Refreshing instance network info cache due to event network-changed-891c589c-0854-41a8-8eb2-e06ac9124837. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 868.937885] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Acquiring lock "refresh_cache-454bd092-f683-4a3a-91c9-65191d6996f4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.937885] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Acquired lock "refresh_cache-454bd092-f683-4a3a-91c9-65191d6996f4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.937885] env[65758]: DEBUG nova.network.neutron [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Refreshing network info cache for port 891c589c-0854-41a8-8eb2-e06ac9124837 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 868.966213] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660570, 'name': CreateVM_Task, 'duration_secs': 0.523084} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.966213] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.966629] env[65758]: WARNING neutronclient.v2_0.client [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 868.967089] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.967552] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.967910] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.968233] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a16ecf91-ab06-4228-8e05-4f4b1d603039 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.973990] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 868.973990] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5291208d-ee59-cefa-d3b1-ef71d1d1ed81" [ 868.973990] env[65758]: _type = "Task" [ 868.973990] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.983739] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5291208d-ee59-cefa-d3b1-ef71d1d1ed81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.052293] env[65758]: DEBUG oslo_vmware.api [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660571, 'name': PowerOnVM_Task, 'duration_secs': 0.658294} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.052557] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.052864] env[65758]: INFO nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Took 9.45 seconds to spawn the instance on the hypervisor. 
[ 869.053083] env[65758]: DEBUG nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 869.054024] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9198c0db-ee8b-45c8-a482-b1185a3f25b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.112295] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72894f7e-a54d-4465-b258-1237f930fce3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.133308] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228a3df1-3c38-40e2-947e-6340ed8aaaa9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.141285] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance 'a9ec9a64-94c7-41a5-a7a4-5e034ddfc592' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 869.157714] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660573, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.239696] env[65758]: DEBUG nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Received event network-changed-bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 869.239892] env[65758]: DEBUG nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Refreshing instance network info cache due to event network-changed-bb884939-9aaf-474f-9246-eb279d11aa4e. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 869.240131] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Acquiring lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.240274] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Acquired lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.240426] env[65758]: DEBUG nova.network.neutron [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Refreshing network info cache for port bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 869.280104] env[65758]: DEBUG nova.network.neutron [-] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 869.295709] env[65758]: DEBUG nova.compute.utils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 869.297898] env[65758]: DEBUG nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 869.298128] env[65758]: DEBUG nova.network.neutron [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 869.298470] env[65758]: WARNING neutronclient.v2_0.client [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 869.298822] env[65758]: WARNING neutronclient.v2_0.client [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 869.299791] env[65758]: WARNING openstack [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 869.301137] env[65758]: WARNING openstack [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 869.342645] env[65758]: WARNING openstack [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 869.343114] env[65758]: WARNING openstack [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 869.358319] env[65758]: DEBUG nova.policy [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3acf0a8cd564f81914c7f95a3c4dce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3216444936b0444184f3cbb1497fffc6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 869.396971] env[65758]: DEBUG nova.network.neutron [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 869.440102] env[65758]: WARNING neutronclient.v2_0.client [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 869.440523] env[65758]: WARNING openstack [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 869.440963] env[65758]: WARNING openstack [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 869.487760] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5291208d-ee59-cefa-d3b1-ef71d1d1ed81, 'name': SearchDatastore_Task, 'duration_secs': 0.018816} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.490668] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.490945] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.491187] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.491327] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.493193] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.495304] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6dbce1f-ffad-4291-a817-cb76e9008f62 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.505856] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.506072] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.506924] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a847c617-6285-4e13-8052-586e72b5bba1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.516085] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 869.516085] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d94a17-e70d-bef8-04aa-cccf0991d36e" [ 869.516085] env[65758]: _type = "Task" [ 869.516085] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.525635] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d94a17-e70d-bef8-04aa-cccf0991d36e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.574029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "cca3e019-8e82-4473-8609-291703762a6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.574216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "cca3e019-8e82-4473-8609-291703762a6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.574497] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "cca3e019-8e82-4473-8609-291703762a6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.574715] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "cca3e019-8e82-4473-8609-291703762a6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.574947] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "cca3e019-8e82-4473-8609-291703762a6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.576491] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.576730] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.576950] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a 
tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.577196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.577396] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.582119] env[65758]: INFO nova.compute.manager [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Terminating instance [ 869.584971] env[65758]: WARNING neutronclient.v2_0.client [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 869.585680] env[65758]: WARNING openstack [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 869.586086] env[65758]: WARNING openstack [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 869.595418] env[65758]: INFO nova.compute.manager [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Terminating instance [ 869.620619] env[65758]: INFO nova.compute.manager [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Took 53.50 seconds to build instance. 
[ 869.657998] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 869.659687] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eaf68010-2a81-4a5a-a9af-15ca0d6061e5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.666071] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660573, 'name': ReconfigVM_Task, 'duration_secs': 0.73805} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.667375] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3/3a7d0c08-9de6-47f4-a0c3-871458ccc4e3.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.667637] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-851e38c2-04fe-4bdb-a968-180523462ec5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.672101] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 869.672101] env[65758]: value = "task-4660574" [ 869.672101] env[65758]: _type = "Task" [ 869.672101] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.676544] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 869.676544] env[65758]: value = "task-4660575" [ 869.676544] env[65758]: _type = "Task" [ 869.676544] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.693512] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660574, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.700449] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660575, 'name': Rename_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.727176] env[65758]: DEBUG nova.network.neutron [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Updating instance_info_cache with network_info: [{"id": "328056a5-b991-4a04-8444-c1de0afdf0ab", "address": "fa:16:3e:06:3a:d1", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328056a5-b9", "ovs_interfaceid": "328056a5-b991-4a04-8444-c1de0afdf0ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 869.733064] env[65758]: DEBUG nova.network.neutron [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Successfully created port: 6feb5c74-32da-4591-acdf-492fb5e3aebb {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 869.743070] env[65758]: WARNING neutronclient.v2_0.client [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 869.744505] env[65758]: WARNING openstack [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 869.744505] env[65758]: WARNING openstack [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 869.783051] env[65758]: INFO nova.compute.manager [-] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Took 1.31 seconds to deallocate network for instance. 
[ 869.811541] env[65758]: DEBUG nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 869.844981] env[65758]: WARNING neutronclient.v2_0.client [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 869.845758] env[65758]: WARNING openstack [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 869.846157] env[65758]: WARNING openstack [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 869.992344] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd19858a-847c-4810-9b54-296d896f5649 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.000930] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37200d74-7c86-468d-bc5f-5d079a634298 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.042187] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3962cf4b-5820-4e6f-b25d-b60847a4cce7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.058996] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e538fd-8115-41dc-bbce-90f7cf6ad872 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.065869] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d94a17-e70d-bef8-04aa-cccf0991d36e, 'name': SearchDatastore_Task, 'duration_secs': 0.011924} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.071026] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7912be1c-4851-4a0e-963a-d0f00b110c3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.081756] env[65758]: DEBUG nova.compute.provider_tree [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.088184] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 870.088184] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b88535-dc20-568b-7d24-62a585ddbfab" [ 870.088184] env[65758]: _type = "Task" [ 870.088184] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.097987] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b88535-dc20-568b-7d24-62a585ddbfab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.103229] env[65758]: DEBUG nova.compute.manager [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 870.103443] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 870.106025] env[65758]: DEBUG nova.compute.manager [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 870.106025] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 870.107508] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d27cf4-fb8a-466f-ab01-0aee7bfd0cd7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.110902] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1c40c1-2bcb-41ca-87fd-213eb084e1b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.115396] env[65758]: DEBUG nova.network.neutron [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Updated VIF entry in instance network info cache for port 891c589c-0854-41a8-8eb2-e06ac9124837. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 870.115745] env[65758]: DEBUG nova.network.neutron [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Updating instance_info_cache with network_info: [{"id": "891c589c-0854-41a8-8eb2-e06ac9124837", "address": "fa:16:3e:93:38:50", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap891c589c-08", "ovs_interfaceid": "891c589c-0854-41a8-8eb2-e06ac9124837", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 870.122281] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3afad0f9-8961-4a07-a360-44b89c249d49 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.159s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.122739] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 
tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 870.125922] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf6f62fb-ff91-449b-bb03-87d5572c6c27 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.127529] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 870.128403] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50953ab4-0004-45c6-9bfe-227d49ec9fce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.139030] env[65758]: DEBUG oslo_vmware.api [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 870.139030] env[65758]: value = "task-4660576" [ 870.139030] env[65758]: _type = "Task" [ 870.139030] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.139631] env[65758]: DEBUG oslo_vmware.api [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 870.139631] env[65758]: value = "task-4660577" [ 870.139631] env[65758]: _type = "Task" [ 870.139631] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.153733] env[65758]: DEBUG oslo_vmware.api [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660576, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.158665] env[65758]: DEBUG oslo_vmware.api [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.184463] env[65758]: DEBUG oslo_vmware.api [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660574, 'name': PowerOnVM_Task, 'duration_secs': 0.4143} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.185242] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.188021] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7f857972-a9b9-4597-9dde-06533d1af4a7 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance 'a9ec9a64-94c7-41a5-a7a4-5e034ddfc592' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 870.196870] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660575, 'name': Rename_Task, 'duration_secs': 0.166203} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.198372] env[65758]: WARNING neutronclient.v2_0.client [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 870.199034] env[65758]: WARNING openstack [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 870.199388] env[65758]: WARNING openstack [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 870.208548] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.209363] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd8e1184-20ca-451a-a46b-2cc9a3f88195 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.218943] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 870.218943] env[65758]: value = "task-4660578" [ 870.218943] env[65758]: _type = "Task" [ 870.218943] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.228532] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660578, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.237383] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "refresh_cache-31816c0c-d7d2-48db-9a87-a1e03c938a60" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.237830] env[65758]: DEBUG nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Instance network_info: |[{"id": "328056a5-b991-4a04-8444-c1de0afdf0ab", "address": "fa:16:3e:06:3a:d1", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328056a5-b9", "ovs_interfaceid": "328056a5-b991-4a04-8444-c1de0afdf0ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 870.238643] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:3a:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49b5df12-d801-4140-8816-2fd401608c7d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '328056a5-b991-4a04-8444-c1de0afdf0ab', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.246388] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 870.249565] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.250075] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e38eee62-1cae-4209-8260-dbe1716cef6e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.273985] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.273985] env[65758]: value = "task-4660579" [ 870.273985] env[65758]: _type = "Task" [ 870.273985] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.284835] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660579, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.298063] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.387828] env[65758]: DEBUG nova.network.neutron [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Updated VIF entry in instance network info cache for port bb884939-9aaf-474f-9246-eb279d11aa4e. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 870.388662] env[65758]: DEBUG nova.network.neutron [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Updating instance_info_cache with network_info: [{"id": "bb884939-9aaf-474f-9246-eb279d11aa4e", "address": "fa:16:3e:56:a6:cd", "network": {"id": "894a2450-645a-478b-a119-7a6383b9b715", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1509187597-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee7ee47fdf0c4bf9802f9f6ef642150a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb884939-9a", "ovs_interfaceid": "bb884939-9aaf-474f-9246-eb279d11aa4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 870.585252] env[65758]: DEBUG nova.scheduler.client.report [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 870.600991] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b88535-dc20-568b-7d24-62a585ddbfab, 'name': SearchDatastore_Task, 'duration_secs': 0.03056} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.601384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.601951] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 454bd092-f683-4a3a-91c9-65191d6996f4/454bd092-f683-4a3a-91c9-65191d6996f4.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.602948] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9620d3c3-824d-4f7d-9367-414ea5f91b25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.611517] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 870.611517] env[65758]: value = "task-4660580" [ 870.611517] env[65758]: _type = "Task" [ 870.611517] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.620358] env[65758]: DEBUG oslo_concurrency.lockutils [req-2497b02b-5e81-4cea-b092-36237a4630f2 req-04bcacb1-07f5-41b0-8722-c4fca4a8f2eb service nova] Releasing lock "refresh_cache-454bd092-f683-4a3a-91c9-65191d6996f4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.621064] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.629274] env[65758]: DEBUG nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 870.655257] env[65758]: DEBUG oslo_vmware.api [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660577, 'name': PowerOffVM_Task, 'duration_secs': 0.284373} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.659199] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 870.659485] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 870.659879] env[65758]: DEBUG oslo_vmware.api [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660576, 'name': PowerOffVM_Task, 'duration_secs': 0.2838} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.660181] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fd20bfb-3e7f-48a1-950e-8aa1979b2764 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.662229] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 870.662530] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 870.662866] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f77a290-669e-46b6-b559-5fa4f140a368 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.735830] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660578, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.751816] env[65758]: DEBUG nova.compute.manager [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 870.754198] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f786a83b-e3b0-4877-af33-76c4190d6f91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.771211] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.771508] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.771751] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Deleting the datastore file [datastore1] 85082b72-89dd-47b7-b8ad-f2ad5ad0638d {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.772083] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9422815-937f-4e39-8587-2bf4c40c0b70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.781802] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.782200] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.782407] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Deleting the datastore file [datastore2] cca3e019-8e82-4473-8609-291703762a6e {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.782853] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8941dd0b-11f6-4234-9e6d-060ea5db9170 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.790131] env[65758]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-4660579, 'name': CreateVM_Task, 'duration_secs': 0.424922} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.793371] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.793975] env[65758]: DEBUG oslo_vmware.api [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for the task: (returnval){ [ 870.793975] env[65758]: value = "task-4660583" [ 870.793975] env[65758]: _type = "Task" [ 870.793975] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.795697] env[65758]: WARNING neutronclient.v2_0.client [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 870.796305] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.796582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.797133] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 870.799203] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47fff571-7a10-4b33-861c-f8d74ff277e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.806389] env[65758]: DEBUG oslo_vmware.api [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for the task: (returnval){ [ 870.806389] env[65758]: value = "task-4660584" [ 870.806389] env[65758]: _type = "Task" [ 870.806389] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.817940] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 870.817940] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52827f66-a095-01d2-e236-5fcbb5d7d674" [ 870.817940] env[65758]: _type = "Task" [ 870.817940] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.818352] env[65758]: DEBUG oslo_vmware.api [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660583, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.829919] env[65758]: DEBUG nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 870.832396] env[65758]: DEBUG oslo_vmware.api [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.840750] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52827f66-a095-01d2-e236-5fcbb5d7d674, 'name': SearchDatastore_Task, 'duration_secs': 0.023844} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.841180] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.841469] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.841747] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.841921] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.842162] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.842495] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c986b29-8062-4418-99cd-9e0179035a8e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.856245] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.856367] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.857176] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92c2abf4-3322-4d4f-8703-297e944adfda {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.862174] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 870.862441] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 870.862593] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 870.862837] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 870.863024] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 870.863181] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 870.863384] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 870.863575] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 870.863799] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 870.864021] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 870.864220] env[65758]: DEBUG nova.virt.hardware [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 870.865168] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97521c5f-faaf-4e40-99a8-11c95965a4fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.872340] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 870.872340] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52282870-0514-1ef8-4e51-63ea7d91ce80" [ 870.872340] env[65758]: _type = "Task" [ 870.872340] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.879389] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc1f7c3-e658-4987-bc44-308836df42b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.890309] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52282870-0514-1ef8-4e51-63ea7d91ce80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.899472] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Releasing lock "refresh_cache-d42d0818-1486-4696-9871-2cf989aeb885" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.899661] env[65758]: DEBUG nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Received event network-vif-plugged-328056a5-b991-4a04-8444-c1de0afdf0ab {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 870.899887] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Acquiring lock "31816c0c-d7d2-48db-9a87-a1e03c938a60-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.900128] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Lock "31816c0c-d7d2-48db-9a87-a1e03c938a60-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.900319] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Lock "31816c0c-d7d2-48db-9a87-a1e03c938a60-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.900737] env[65758]: DEBUG nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] No waiting events found dispatching network-vif-plugged-328056a5-b991-4a04-8444-c1de0afdf0ab {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 870.900737] env[65758]: WARNING nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Received unexpected event network-vif-plugged-328056a5-b991-4a04-8444-c1de0afdf0ab for instance with vm_state building and task_state spawning. [ 870.900860] env[65758]: DEBUG nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Received event network-changed-328056a5-b991-4a04-8444-c1de0afdf0ab {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 870.900993] env[65758]: DEBUG nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Refreshing instance network info cache due to event network-changed-328056a5-b991-4a04-8444-c1de0afdf0ab. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 870.901271] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Acquiring lock "refresh_cache-31816c0c-d7d2-48db-9a87-a1e03c938a60" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.901432] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Acquired lock "refresh_cache-31816c0c-d7d2-48db-9a87-a1e03c938a60" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.901591] env[65758]: DEBUG nova.network.neutron [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Refreshing network info cache for port 328056a5-b991-4a04-8444-c1de0afdf0ab {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 871.091363] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.298s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.094811] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.900s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.095186] env[65758]: DEBUG nova.objects.instance [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 871.124736] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660580, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.129528] env[65758]: INFO nova.scheduler.client.report [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Deleted allocations for instance a662eac8-07e2-47f1-a4dd-9abbe824817d [ 871.161108] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.237687] env[65758]: DEBUG oslo_vmware.api [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660578, 'name': PowerOnVM_Task, 'duration_secs': 0.624509} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.238105] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.239506] env[65758]: INFO nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Took 9.44 seconds to spawn the instance on the hypervisor. 
[ 871.239506] env[65758]: DEBUG nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 871.239506] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f306c30-c276-497d-b935-d1e90f011c1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.268570] env[65758]: INFO nova.compute.manager [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] instance snapshotting [ 871.269050] env[65758]: WARNING nova.compute.manager [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 871.273429] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159f8bb5-aaff-41e2-b721-627fba6aa008 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.305359] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e0ddba-6dfd-4454-bfe1-66ee5ad3314e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.317635] env[65758]: DEBUG oslo_vmware.api [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Task: {'id': task-4660583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.465626} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.320223] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.320412] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.320673] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.320838] env[65758]: INFO nova.compute.manager [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 871.321218] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 871.330450] env[65758]: DEBUG nova.compute.manager [-] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 871.330450] env[65758]: DEBUG nova.network.neutron [-] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 871.330765] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 871.331764] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 871.331764] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 871.339905] env[65758]: DEBUG oslo_vmware.api [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.386797] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52282870-0514-1ef8-4e51-63ea7d91ce80, 'name': SearchDatastore_Task, 'duration_secs': 0.023341} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.387788] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4df7ee6b-1653-410b-9000-faaaaa04102b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.392258] env[65758]: DEBUG nova.network.neutron [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Successfully updated port: 6feb5c74-32da-4591-acdf-492fb5e3aebb {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 871.395481] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 871.395481] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d53f9c-6620-4350-1c18-cd2d4d883783" [ 871.395481] env[65758]: _type = "Task" [ 871.395481] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.407878] env[65758]: WARNING neutronclient.v2_0.client [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 871.408735] env[65758]: WARNING openstack [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 871.409160] env[65758]: WARNING openstack [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 871.418429] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d53f9c-6620-4350-1c18-cd2d4d883783, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.420085] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 871.624678] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660580, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622063} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.624959] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 454bd092-f683-4a3a-91c9-65191d6996f4/454bd092-f683-4a3a-91c9-65191d6996f4.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.625180] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.625450] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bad8dc4a-f8b0-4448-a917-ba377142e96b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.632995] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 871.632995] env[65758]: value = "task-4660585" [ 871.632995] env[65758]: _type = "Task" [ 871.632995] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.640477] env[65758]: DEBUG oslo_concurrency.lockutils [None req-da389cb0-4300-4650-ba39-96411466233b tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "a662eac8-07e2-47f1-a4dd-9abbe824817d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.164s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.650437] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660585, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.724593] env[65758]: WARNING neutronclient.v2_0.client [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 871.725362] env[65758]: WARNING openstack [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 871.725716] env[65758]: WARNING openstack [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 871.767467] env[65758]: INFO nova.compute.manager [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Took 50.89 seconds to build instance. [ 871.826140] env[65758]: DEBUG oslo_vmware.api [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Task: {'id': task-4660584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.618555} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.826492] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.826714] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.826936] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.827168] env[65758]: INFO nova.compute.manager [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] [instance: cca3e019-8e82-4473-8609-291703762a6e] Took 1.72 seconds to destroy the instance on the hypervisor. [ 871.827481] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 871.827739] env[65758]: DEBUG nova.compute.manager [-] [instance: cca3e019-8e82-4473-8609-291703762a6e] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 871.827870] env[65758]: DEBUG nova.network.neutron [-] [instance: cca3e019-8e82-4473-8609-291703762a6e] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 871.828170] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 871.831476] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 871.831476] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 871.842031] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 871.842511] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-64347c5d-14ad-4928-8715-ce81111d2b78 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.852413] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 871.852413] env[65758]: value = "task-4660586" [ 871.852413] env[65758]: _type = "Task" [ 871.852413] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.863055] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660586, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.865703] env[65758]: DEBUG nova.network.neutron [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Updated VIF entry in instance network info cache for port 328056a5-b991-4a04-8444-c1de0afdf0ab. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 871.866135] env[65758]: DEBUG nova.network.neutron [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Updating instance_info_cache with network_info: [{"id": "328056a5-b991-4a04-8444-c1de0afdf0ab", "address": "fa:16:3e:06:3a:d1", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328056a5-b9", "ovs_interfaceid": "328056a5-b991-4a04-8444-c1de0afdf0ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 871.872822] env[65758]: DEBUG nova.compute.manager [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Received event network-changed-1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 871.873060] env[65758]: DEBUG nova.compute.manager [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Refreshing instance network info cache due to event network-changed-1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 871.873286] env[65758]: DEBUG oslo_concurrency.lockutils [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] Acquiring lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.873435] env[65758]: DEBUG oslo_concurrency.lockutils [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] Acquired lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.873556] env[65758]: DEBUG nova.network.neutron [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Refreshing network info cache for port 1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 871.899329] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "refresh_cache-a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.899523] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "refresh_cache-a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.899704] env[65758]: DEBUG nova.network.neutron [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 871.913933] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d53f9c-6620-4350-1c18-cd2d4d883783, 'name': SearchDatastore_Task, 'duration_secs': 0.017331} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.915132] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 871.917252] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.917749] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 31816c0c-d7d2-48db-9a87-a1e03c938a60/31816c0c-d7d2-48db-9a87-a1e03c938a60.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.918640] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3db62170-80ed-41b7-81fa-59b7057b36e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.927779] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 871.927779] env[65758]: value = "task-4660587" [ 871.927779] env[65758]: _type = "Task" [ 871.927779] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.938301] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660587, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.964726] env[65758]: DEBUG nova.compute.manager [req-c3e1bdb2-7af9-4a0b-a2f2-2b43887b2fa2 req-e25daa6f-4c00-4c57-ac32-77ec2193ad84 service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Received event network-vif-plugged-6feb5c74-32da-4591-acdf-492fb5e3aebb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 871.965198] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3e1bdb2-7af9-4a0b-a2f2-2b43887b2fa2 req-e25daa6f-4c00-4c57-ac32-77ec2193ad84 service nova] Acquiring lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.965423] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3e1bdb2-7af9-4a0b-a2f2-2b43887b2fa2 req-e25daa6f-4c00-4c57-ac32-77ec2193ad84 service nova] Lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.965639] env[65758]: DEBUG oslo_concurrency.lockutils [req-c3e1bdb2-7af9-4a0b-a2f2-2b43887b2fa2 req-e25daa6f-4c00-4c57-ac32-77ec2193ad84 service nova] Lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.965767] env[65758]: DEBUG nova.compute.manager [req-c3e1bdb2-7af9-4a0b-a2f2-2b43887b2fa2 req-e25daa6f-4c00-4c57-ac32-77ec2193ad84 service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] No waiting events found dispatching network-vif-plugged-6feb5c74-32da-4591-acdf-492fb5e3aebb {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 871.965934] env[65758]: WARNING nova.compute.manager [req-c3e1bdb2-7af9-4a0b-a2f2-2b43887b2fa2 req-e25daa6f-4c00-4c57-ac32-77ec2193ad84 service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Received unexpected event network-vif-plugged-6feb5c74-32da-4591-acdf-492fb5e3aebb for instance with vm_state building and task_state spawning. 
[ 872.111922] env[65758]: DEBUG oslo_concurrency.lockutils [None req-792ffbe9-00d4-4a82-90c9-3d006c9f9e6d tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.114659] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.798s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.115391] env[65758]: INFO nova.compute.claims [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.146500] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660585, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073484} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.146841] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.149836] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da9935f-e5eb-4a06-8c69-308104f5ac96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.178489] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 454bd092-f683-4a3a-91c9-65191d6996f4/454bd092-f683-4a3a-91c9-65191d6996f4.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.181072] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44a5cf6c-8595-49b8-818b-89d2286702a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.204990] env[65758]: DEBUG nova.network.neutron [-] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 872.208157] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 
872.208157] env[65758]: value = "task-4660588" [ 872.208157] env[65758]: _type = "Task" [ 872.208157] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.219952] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660588, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.271929] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b56ae4a3-9ad5-49a0-87ae-1985b9047508 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.256s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.365043] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660586, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.368755] env[65758]: DEBUG oslo_concurrency.lockutils [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] Releasing lock "refresh_cache-31816c0c-d7d2-48db-9a87-a1e03c938a60" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.369077] env[65758]: DEBUG nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Received event network-vif-deleted-09d73b49-88a0-426f-915b-c6c03998738f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 872.369258] env[65758]: INFO nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Neutron deleted interface 09d73b49-88a0-426f-915b-c6c03998738f; detaching it from the instance and deleting it from the info cache [ 872.369444] env[65758]: DEBUG nova.network.neutron [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 872.379465] env[65758]: WARNING neutronclient.v2_0.client [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 872.380408] env[65758]: WARNING openstack [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 872.380963] env[65758]: WARNING openstack [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 872.407861] env[65758]: WARNING openstack [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 872.408404] env[65758]: WARNING openstack [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 872.445429] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660587, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.463600] env[65758]: DEBUG nova.network.neutron [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 872.598214] env[65758]: WARNING neutronclient.v2_0.client [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 872.599264] env[65758]: WARNING openstack [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 872.599769] env[65758]: WARNING openstack [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 872.660783] env[65758]: WARNING neutronclient.v2_0.client [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 872.660783] env[65758]: WARNING openstack [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 872.660915] env[65758]: WARNING openstack [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 872.697441] env[65758]: DEBUG nova.network.neutron [-] [instance: cca3e019-8e82-4473-8609-291703762a6e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 872.716313] env[65758]: INFO nova.compute.manager [-] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Took 1.38 seconds to deallocate network for instance. 
[ 872.721978] env[65758]: DEBUG nova.network.neutron [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Updating instance_info_cache with network_info: [{"id": "6feb5c74-32da-4591-acdf-492fb5e3aebb", "address": "fa:16:3e:55:3c:09", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6feb5c74-32", "ovs_interfaceid": "6feb5c74-32da-4591-acdf-492fb5e3aebb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 872.734018] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660588, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.775966] env[65758]: DEBUG nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 872.786972] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.787278] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.787853] env[65758]: DEBUG nova.compute.manager [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Going to confirm migration 3 {{(pid=65758) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 872.799582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.800121] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.800297] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.800743] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.801042] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.808213] env[65758]: INFO nova.compute.manager [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Terminating instance [ 872.823857] env[65758]: DEBUG nova.network.neutron [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updated VIF entry in instance network info cache for port 1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 872.823857] env[65758]: DEBUG nova.network.neutron [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updating instance_info_cache with network_info: [{"id": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "address": "fa:16:3e:f0:a2:8e", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb28e5b-cb", "ovs_interfaceid": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 872.867872] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660586, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.871987] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6513611-fe34-4913-ac9d-bb33dace82d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.882030] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641501db-3970-4735-8d8b-dd5dd93bbdf9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.922788] env[65758]: DEBUG nova.compute.manager [req-b5a2d87e-bd61-418f-82e7-98578a07d3ca req-75564414-e336-4df1-b67d-51bf326d57b0 service nova] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Detach interface failed, port_id=09d73b49-88a0-426f-915b-c6c03998738f, reason: Instance 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 872.939482] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660587, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.868383} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.939783] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 31816c0c-d7d2-48db-9a87-a1e03c938a60/31816c0c-d7d2-48db-9a87-a1e03c938a60.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.940098] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.940352] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb222c24-431e-4769-a6a7-f311ae6180b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.950658] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 872.950658] env[65758]: value = "task-4660589" [ 872.950658] env[65758]: _type = "Task" [ 872.950658] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.960829] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660589, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.154278] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquiring lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.154499] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.154698] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquiring lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.154872] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.155337] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.157560] env[65758]: INFO nova.compute.manager [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Terminating instance [ 873.201018] env[65758]: INFO nova.compute.manager [-] [instance: cca3e019-8e82-4473-8609-291703762a6e] Took 1.37 seconds to deallocate network for instance. [ 873.221907] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660588, 'name': ReconfigVM_Task, 'duration_secs': 0.523598} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.222155] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 454bd092-f683-4a3a-91c9-65191d6996f4/454bd092-f683-4a3a-91c9-65191d6996f4.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.222876] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59413a8f-b911-490e-ac0a-74d2bac90a3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.233840] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 873.233840] env[65758]: value = "task-4660590" [ 873.233840] env[65758]: _type = "Task" [ 873.233840] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.237329] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "refresh_cache-a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.237694] env[65758]: DEBUG nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Instance network_info: |[{"id": "6feb5c74-32da-4591-acdf-492fb5e3aebb", "address": "fa:16:3e:55:3c:09", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6feb5c74-32", "ovs_interfaceid": "6feb5c74-32da-4591-acdf-492fb5e3aebb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 873.238634] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.241432] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:3c:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a965790c-2d2f-4c2a-9ee7-745f4d53039b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6feb5c74-32da-4591-acdf-492fb5e3aebb', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.249093] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 873.251063] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.251705] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5faa888-7a2d-4d71-8731-394ac82b7761 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.276099] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660590, 'name': Rename_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.283185] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.283185] env[65758]: value = "task-4660591" [ 873.283185] env[65758]: _type = "Task" [ 873.283185] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.304097] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660591, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.305138] env[65758]: WARNING neutronclient.v2_0.client [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 873.316058] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.320080] env[65758]: DEBUG nova.compute.manager [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 873.320386] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.321969] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be91deff-9d68-4442-abbd-989070dbf9c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.327378] env[65758]: DEBUG oslo_concurrency.lockutils [req-b225c9d2-f0e6-4842-839a-abee1c599a31 req-112a4d2b-fb07-4dd0-ba30-e9db9e73b69c service nova] Releasing lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.331157] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.331491] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-796e17b1-cd3e-4cca-bb0a-19b4052a8373 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.342770] env[65758]: DEBUG oslo_vmware.api [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 873.342770] env[65758]: value = "task-4660592" [ 873.342770] env[65758]: _type = "Task" [ 873.342770] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.348377] env[65758]: WARNING neutronclient.v2_0.client [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 873.348727] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.348975] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.349249] env[65758]: DEBUG nova.network.neutron [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 873.349445] env[65758]: DEBUG nova.objects.instance [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lazy-loading 'info_cache' on Instance uuid a9ec9a64-94c7-41a5-a7a4-5e034ddfc592 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.363265] env[65758]: DEBUG oslo_vmware.api [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660592, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.366959] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660586, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.462240] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660589, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.273957} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.465303] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.466426] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9858065-edea-4f19-8bc6-86012d4ec93e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.491239] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 31816c0c-d7d2-48db-9a87-a1e03c938a60/31816c0c-d7d2-48db-9a87-a1e03c938a60.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.494678] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7db6e173-77cf-46fc-9fbd-198ec4aef080 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.518558] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 873.518558] env[65758]: value = "task-4660593" [ 873.518558] env[65758]: _type = "Task" [ 873.518558] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.533084] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660593, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.665446] env[65758]: DEBUG nova.compute.manager [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 873.665698] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.667014] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e8d28e-8190-4cdc-9c3d-a5eea37a7753 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.677733] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.678071] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fa6b735-2933-426a-bb1c-b1d8e1497adf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.688730] env[65758]: DEBUG oslo_vmware.api [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 873.688730] env[65758]: value = "task-4660594" [ 873.688730] env[65758]: _type = "Task" [ 873.688730] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.702332] env[65758]: DEBUG oslo_vmware.api [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660594, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.708711] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.746300] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660590, 'name': Rename_Task, 'duration_secs': 0.387098} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.750463] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.751180] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c9dd70e-58e9-4f92-9a4f-554ddd0c7265 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.761828] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 873.761828] env[65758]: value = "task-4660595" [ 873.761828] env[65758]: _type = "Task" [ 873.761828] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.778432] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660595, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.783187] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096fdfea-3690-4b6e-a77b-850f94678f6c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.797454] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660591, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.800162] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3ed81a-ec1c-4fcb-9e15-f8df1770466b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.835498] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c2dbc3-4228-4d93-94e5-015c12a2e003 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.847734] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc6a24f-f0e6-48f8-b26d-3601c0d5bea1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.876532] env[65758]: DEBUG nova.compute.provider_tree [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.878581] env[65758]: DEBUG oslo_vmware.api [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660592, 'name': PowerOffVM_Task, 'duration_secs': 0.294328} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.880016] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 873.880325] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 873.880725] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92636dcf-8348-42f8-8d20-62fbd455e914 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.888846] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660586, 'name': CreateSnapshot_Task, 'duration_secs': 1.777611} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.890937] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 873.893026] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ffaf06-ed3d-451f-9328-d1c06fde3431 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.952452] env[65758]: DEBUG nova.compute.manager [req-c5043ca8-12d4-41c4-9624-39ae2fbeb002 req-17ac7b14-3eb4-44a4-b2ff-e6cd89e25102 service nova] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Received event network-vif-deleted-8a825fae-1c70-4269-a958-4d8e821b6eeb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 874.005525] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.005848] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.006219] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Deleting the datastore file [datastore1] f15c6953-f76b-44eb-bd1b-c0d3adddc163 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.006379] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16b54650-ba2e-4127-998d-1367ba35e10b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.015126] env[65758]: DEBUG oslo_vmware.api [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for the task: (returnval){ [ 874.015126] env[65758]: value = "task-4660597" [ 874.015126] env[65758]: _type = "Task" [ 874.015126] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.028782] env[65758]: DEBUG oslo_vmware.api [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660597, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.032189] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660593, 'name': ReconfigVM_Task, 'duration_secs': 0.484838} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.032501] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 31816c0c-d7d2-48db-9a87-a1e03c938a60/31816c0c-d7d2-48db-9a87-a1e03c938a60.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.033301] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7738a092-1260-4e71-9fee-aec63e2c7890 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.042381] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 874.042381] env[65758]: value = "task-4660598" [ 874.042381] env[65758]: _type = "Task" [ 874.042381] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.052016] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660598, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.067677] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.067954] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.094063] env[65758]: DEBUG nova.compute.manager [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Received event network-changed-6feb5c74-32da-4591-acdf-492fb5e3aebb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 874.094063] env[65758]: DEBUG nova.compute.manager [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Refreshing instance network info cache due to event network-changed-6feb5c74-32da-4591-acdf-492fb5e3aebb. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 874.095026] env[65758]: DEBUG oslo_concurrency.lockutils [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] Acquiring lock "refresh_cache-a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.096233] env[65758]: DEBUG oslo_concurrency.lockutils [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] Acquired lock "refresh_cache-a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.096233] env[65758]: DEBUG nova.network.neutron [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Refreshing network info cache for port 6feb5c74-32da-4591-acdf-492fb5e3aebb {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 874.199600] env[65758]: DEBUG oslo_vmware.api [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660594, 'name': PowerOffVM_Task, 'duration_secs': 0.268738} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.199873] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.200030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.200306] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cabf6b72-0228-43f4-8c06-f97b1909b07b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.275909] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660595, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.277531] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.277812] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.278018] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Deleting the datastore file [datastore1] b7e2a3d9-7db3-40b3-98a5-c6e6e040a947 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.278321] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42644ecd-bd1a-47ed-900a-f9660b3fd4a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.287288] env[65758]: DEBUG oslo_vmware.api [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for the task: (returnval){ [ 874.287288] env[65758]: value = "task-4660600" [ 874.287288] env[65758]: _type = "Task" [ 874.287288] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.301203] env[65758]: DEBUG oslo_vmware.api [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.305738] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660591, 'name': CreateVM_Task, 'duration_secs': 0.516027} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.305738] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.305738] env[65758]: WARNING neutronclient.v2_0.client [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 874.306164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.306209] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.306583] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 874.306875] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce85ceed-e546-4775-bee4-71f6eba0d3d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.313389] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 874.313389] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520a741d-041f-692f-e66a-a57baf3ca886" [ 874.313389] env[65758]: _type = "Task" [ 874.313389] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.323584] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520a741d-041f-692f-e66a-a57baf3ca886, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.381236] env[65758]: DEBUG nova.scheduler.client.report [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.385862] env[65758]: WARNING neutronclient.v2_0.client [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 874.386597] env[65758]: WARNING openstack [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 874.387037] env[65758]: WARNING openstack [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 874.416657] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 874.417693] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-206cdaa6-aaf5-4745-ac95-ddae4c0804a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.430606] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 874.430606] env[65758]: value = "task-4660601" [ 874.430606] env[65758]: _type = "Task" [ 874.430606] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.443259] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660601, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.529686] env[65758]: DEBUG oslo_vmware.api [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Task: {'id': task-4660597, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335278} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.529868] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.530171] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 874.530476] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 874.530864] env[65758]: INFO nova.compute.manager [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Took 1.21 seconds to destroy the instance on the hypervisor. [ 874.531173] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 874.534424] env[65758]: DEBUG nova.compute.manager [-] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 874.534541] env[65758]: DEBUG nova.network.neutron [-] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 874.534780] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 874.535364] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 874.535626] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 874.553672] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660598, 'name': Rename_Task, 'duration_secs': 0.282203} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.554069] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 874.554427] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27702520-897a-484f-976a-c9c844eb5597 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.563165] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 874.563165] env[65758]: value = "task-4660602" [ 874.563165] env[65758]: _type = "Task" [ 874.563165] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.580580] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660602, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.582276] env[65758]: WARNING neutronclient.v2_0.client [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 874.583199] env[65758]: WARNING openstack [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 874.583390] env[65758]: WARNING openstack [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 874.593851] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.594468] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.594703] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.594898] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.595065] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.595279] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.595453] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 874.595631] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.597874] env[65758]: WARNING neutronclient.v2_0.client [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 874.598575] env[65758]: WARNING openstack [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 874.599053] env[65758]: WARNING openstack [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 874.615930] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 874.715482] env[65758]: DEBUG nova.network.neutron [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance_info_cache with network_info: [{"id": "e31ffc86-5e08-405f-8129-6af1973003bf", "address": "fa:16:3e:88:0c:68", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31ffc86-5e", "ovs_interfaceid": "e31ffc86-5e08-405f-8129-6af1973003bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 874.777771] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660595, 'name': PowerOnVM_Task} 
progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.804436] env[65758]: DEBUG oslo_vmware.api [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Task: {'id': task-4660600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416353} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.804988] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.805225] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 874.805529] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 874.805785] env[65758]: INFO nova.compute.manager [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Took 1.14 seconds to destroy the instance on the hypervisor. [ 874.806162] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 874.806503] env[65758]: DEBUG nova.compute.manager [-] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 874.806566] env[65758]: DEBUG nova.network.neutron [-] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 874.807369] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 874.808454] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 874.808984] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 874.830103] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520a741d-041f-692f-e66a-a57baf3ca886, 'name': SearchDatastore_Task, 'duration_secs': 0.022486} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.831348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.831348] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.831348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.831348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.831518] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.831835] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-861b24ac-0c14-46af-bda7-8fb559fc33e1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.844101] env[65758]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.844101] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 874.844827] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f61604-d10e-4dc8-b96a-3f6f8d38ff52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.852432] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 874.852432] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523dd5c7-2974-d713-5de4-01674b81431f" [ 874.852432] env[65758]: _type = "Task" [ 874.852432] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.863512] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523dd5c7-2974-d713-5de4-01674b81431f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.895726] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.782s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.896399] env[65758]: DEBUG nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 874.900230] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 874.903626] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.048s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.903752] env[65758]: DEBUG nova.objects.instance [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lazy-loading 'resources' on Instance uuid c1b9d81e-d747-4665-a083-26d8383f7645 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.945406] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660601, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.971367] env[65758]: WARNING neutronclient.v2_0.client [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 874.972908] env[65758]: WARNING openstack [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 874.972908] env[65758]: WARNING openstack [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 875.080773] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660602, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.098710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.101212] env[65758]: DEBUG nova.network.neutron [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Updated VIF entry in instance network info cache for port 6feb5c74-32da-4591-acdf-492fb5e3aebb. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 875.101888] env[65758]: DEBUG nova.network.neutron [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Updating instance_info_cache with network_info: [{"id": "6feb5c74-32da-4591-acdf-492fb5e3aebb", "address": "fa:16:3e:55:3c:09", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6feb5c74-32", "ovs_interfaceid": "6feb5c74-32da-4591-acdf-492fb5e3aebb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 875.222027] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.222027] env[65758]: DEBUG nova.objects.instance [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lazy-loading 'migration_context' on Instance uuid a9ec9a64-94c7-41a5-a7a4-5e034ddfc592 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.282024] env[65758]: DEBUG oslo_vmware.api [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660595, 'name': PowerOnVM_Task, 'duration_secs': 1.092629} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.282024] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.282024] env[65758]: INFO nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Took 10.70 seconds to spawn the instance on the hypervisor. 
[ 875.282024] env[65758]: DEBUG nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 875.282024] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e501e7-af4b-4fa5-8a26-09cab6696a6b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.366397] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523dd5c7-2974-d713-5de4-01674b81431f, 'name': SearchDatastore_Task, 'duration_secs': 0.01419} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.368057] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1512d579-e842-4a9a-895a-a468b386ae14 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.375212] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 875.375212] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f3f99e-09fb-3685-9e5f-17ddc2c0174d" [ 875.375212] env[65758]: _type = "Task" [ 875.375212] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.385449] env[65758]: DEBUG nova.network.neutron [-] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 875.387329] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f3f99e-09fb-3685-9e5f-17ddc2c0174d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.411118] env[65758]: DEBUG nova.compute.utils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 875.413491] env[65758]: DEBUG nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 875.413491] env[65758]: DEBUG nova.network.neutron [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 875.413633] env[65758]: WARNING neutronclient.v2_0.client [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 875.414346] env[65758]: WARNING neutronclient.v2_0.client [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 875.418333] env[65758]: WARNING openstack [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 875.418333] env[65758]: WARNING openstack [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 875.445334] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660601, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.470772] env[65758]: DEBUG nova.policy [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e83d5c8036694a28879117a073cbef80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f688ce091774c3fa77875b0aef79510', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 875.579566] env[65758]: DEBUG oslo_vmware.api [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660602, 'name': PowerOnVM_Task, 'duration_secs': 0.555907} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.579847] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.581120] env[65758]: INFO nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Took 7.72 seconds to spawn the instance on the hypervisor. 
[ 875.581120] env[65758]: DEBUG nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 875.584085] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db17541c-4ac9-4f53-a413-45101e07547a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.606284] env[65758]: DEBUG oslo_concurrency.lockutils [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] Releasing lock "refresh_cache-a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.606569] env[65758]: DEBUG nova.compute.manager [req-7dc4ed5e-d9c3-4a20-9b92-a7b10c3b8867 req-d2a0f6bb-a6d1-4694-a586-7b0fe2c8dcec service nova] [instance: cca3e019-8e82-4473-8609-291703762a6e] Received event network-vif-deleted-972faaf3-0ee4-4d20-a393-b48d940dbae2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 875.725701] env[65758]: DEBUG nova.objects.base [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 875.726693] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1b3c38-6140-440a-b5e4-414f22860186 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.762243] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91ab2c87-3455-4617-ba58-f74ef929c438 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.769387] env[65758]: DEBUG oslo_vmware.api [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 875.769387] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e9a24e-d6a0-3aea-a18f-3770fc3338c2" [ 875.769387] env[65758]: _type = "Task" [ 875.769387] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.779327] env[65758]: DEBUG oslo_vmware.api [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e9a24e-d6a0-3aea-a18f-3770fc3338c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.799206] env[65758]: INFO nova.compute.manager [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Took 54.31 seconds to build instance. 
[ 875.834738] env[65758]: DEBUG nova.network.neutron [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Successfully created port: c0e6c6d9-40f1-437d-b6c9-3cc445c43b56 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 875.891263] env[65758]: INFO nova.compute.manager [-] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Took 1.36 seconds to deallocate network for instance. [ 875.891598] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f3f99e-09fb-3685-9e5f-17ddc2c0174d, 'name': SearchDatastore_Task, 'duration_secs': 0.011765} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.893375] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.893628] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a/a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 875.899520] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84063e15-8531-4b65-bd77-38016bb061ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.910227] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 875.910227] env[65758]: value = "task-4660603" [ 875.910227] env[65758]: _type = "Task" [ 875.910227] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.920236] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.923703] env[65758]: DEBUG nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 875.926720] env[65758]: DEBUG nova.network.neutron [-] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 875.943810] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660601, 'name': CloneVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.041344] env[65758]: DEBUG nova.compute.manager [req-6f3b049e-ad00-4670-bcec-76fc8cade083 req-af3e765d-0104-453c-8e05-85bb29b71df8 service nova] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Received event network-vif-deleted-add59776-3d93-4cc4-8b79-045e84a073ff {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 876.109442] env[65758]: INFO nova.compute.manager [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Took 47.33 seconds to build instance. [ 876.138333] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c729a4c9-e6ea-42a2-b781-889f821d63f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.147709] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79a2ae6-379e-45aa-941d-4fe4439abd54 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.186407] env[65758]: DEBUG nova.compute.manager [req-6026b864-b71b-4a26-9a65-17220e450c86 req-87478b16-a4e1-4e84-a65f-66e473ae57a6 service nova] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Received event network-vif-deleted-98886eae-63b7-4cb2-a8dc-f86495b733a9 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 876.187429] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb77f153-a204-4080-bda3-728ed068772c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.197943] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c986fde-31da-4780-8bdb-6771bd03a15b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.215393] env[65758]: DEBUG nova.compute.provider_tree [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.283538] env[65758]: DEBUG oslo_vmware.api [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e9a24e-d6a0-3aea-a18f-3770fc3338c2, 'name': SearchDatastore_Task, 'duration_secs': 0.009131} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.283891] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.306658] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4761035d-823f-462a-b188-89c4e9db32a1 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "454bd092-f683-4a3a-91c9-65191d6996f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.653s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.404339] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.421360] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660603, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.433581] env[65758]: INFO nova.compute.manager [-] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Took 1.63 seconds to deallocate network for instance. [ 876.449129] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660601, 'name': CloneVM_Task, 'duration_secs': 1.582724} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.450276] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Created linked-clone VM from snapshot [ 876.450646] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c74a852-303c-4b4d-aee7-32ad3089c2cd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.460314] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Uploading image 9bca55e5-7256-4973-9126-580769839e32 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 876.485015] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 876.485015] env[65758]: value = "vm-909943" [ 876.485015] env[65758]: _type = "VirtualMachine" [ 876.485015] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 876.485625] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f34cb9e9-bbdd-4ac8-a41d-cec95a29c5d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.495659] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lease: (returnval){ [ 876.495659] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f172f1-5ce2-cee7-be21-ba57e2977e7b" [ 876.495659] env[65758]: _type = "HttpNfcLease" [ 876.495659] env[65758]: } obtained for exporting VM: (result){ [ 876.495659] env[65758]: value = "vm-909943" [ 876.495659] env[65758]: _type = "VirtualMachine" [ 876.495659] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 876.496046] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the lease: (returnval){ [ 876.496046] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f172f1-5ce2-cee7-be21-ba57e2977e7b" [ 876.496046] env[65758]: _type = "HttpNfcLease" [ 876.496046] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 876.505056] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 876.505056] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f172f1-5ce2-cee7-be21-ba57e2977e7b" [ 876.505056] env[65758]: _type = "HttpNfcLease" [ 876.505056] env[65758]: } is initializing. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 876.612158] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2aa1674b-87de-4874-ac46-60cafe8d0c50 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "31816c0c-d7d2-48db-9a87-a1e03c938a60" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.103s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.718947] env[65758]: DEBUG nova.scheduler.client.report [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 876.808201] env[65758]: DEBUG nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 876.922520] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.860253} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.922846] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a/a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 876.923081] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 876.923354] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-893a5a68-96f4-47ff-a1f8-1bace658cc02 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.932876] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 876.932876] env[65758]: value = "task-4660605" [ 876.932876] env[65758]: _type = "Task" [ 876.932876] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.938586] env[65758]: DEBUG nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 876.944610] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.948832] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660605, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.968581] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 876.968850] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 876.968992] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 876.969238] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 876.969439] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 876.969593] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 876.969806] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 876.969965] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 
tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 876.970150] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 876.970313] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 876.970485] env[65758]: DEBUG nova.virt.hardware [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 876.971482] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143ccead-40fe-43fc-87da-1ca028a70eb7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.980943] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88304b38-3e52-4cec-8083-2df4199989a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.004585] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 877.004585] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f172f1-5ce2-cee7-be21-ba57e2977e7b" [ 877.004585] env[65758]: _type = "HttpNfcLease" [ 877.004585] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 877.005155] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 877.005155] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f172f1-5ce2-cee7-be21-ba57e2977e7b" [ 877.005155] env[65758]: _type = "HttpNfcLease" [ 877.005155] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 877.006437] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e344ef-93cd-41e2-b1d8-ac2d3336c50d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.017333] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ec474-4cce-510b-dea2-30659a2423f6/disk-0.vmdk from lease info. 
{{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 877.017550] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ec474-4cce-510b-dea2-30659a2423f6/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 877.225103] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.322s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.228565] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.261s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.228787] env[65758]: DEBUG nova.objects.instance [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lazy-loading 'resources' on Instance uuid b7323030-4573-4af5-a19a-212a140d642a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 877.250889] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6261d939-1b82-4232-b275-ce1d989080f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.257117] env[65758]: INFO nova.scheduler.client.report [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleted allocations for instance c1b9d81e-d747-4665-a083-26d8383f7645 [ 877.329911] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.444704] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660605, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070512} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.445339] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.446249] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a945e59c-dcb6-48cb-9b32-64cbbfb57718 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.479722] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a/a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.481275] env[65758]: DEBUG nova.network.neutron [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Successfully updated port: c0e6c6d9-40f1-437d-b6c9-3cc445c43b56 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 877.485674] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2508ed9f-b4e8-4ac3-9bb9-05f5b5b41bde {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.512643] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 877.512643] env[65758]: value = "task-4660606" [ 877.512643] env[65758]: _type = "Task" [ 877.512643] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.524401] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660606, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.768973] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b9c9975d-4790-46c4-ae68-118fef690d14 tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "c1b9d81e-d747-4665-a083-26d8383f7645" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.461s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.006786] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquiring lock "refresh_cache-79c63944-c4c8-4c7c-bc42-3f958d737e66" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.006786] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquired lock "refresh_cache-79c63944-c4c8-4c7c-bc42-3f958d737e66" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.006786] env[65758]: DEBUG nova.network.neutron [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 878.030011] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660606, 'name': ReconfigVM_Task, 'duration_secs': 0.409044} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.030506] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Reconfigured VM instance instance-00000043 to attach disk [datastore2] a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a/a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.031449] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c093bd5-0ddf-449e-94fd-89525640158f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.041836] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 878.041836] env[65758]: value = "task-4660607" [ 878.041836] env[65758]: _type = "Task" [ 878.041836] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.058973] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660607, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.268015] env[65758]: DEBUG nova.compute.manager [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Received event network-vif-plugged-c0e6c6d9-40f1-437d-b6c9-3cc445c43b56 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 878.268489] env[65758]: DEBUG oslo_concurrency.lockutils [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Acquiring lock "79c63944-c4c8-4c7c-bc42-3f958d737e66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.268735] env[65758]: DEBUG oslo_concurrency.lockutils [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Lock "79c63944-c4c8-4c7c-bc42-3f958d737e66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.268911] env[65758]: DEBUG oslo_concurrency.lockutils [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Lock "79c63944-c4c8-4c7c-bc42-3f958d737e66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.269319] env[65758]: DEBUG nova.compute.manager [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] No waiting events found dispatching network-vif-plugged-c0e6c6d9-40f1-437d-b6c9-3cc445c43b56 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 878.269427] env[65758]: WARNING nova.compute.manager [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Received unexpected event network-vif-plugged-c0e6c6d9-40f1-437d-b6c9-3cc445c43b56 for instance with vm_state building and task_state spawning. [ 878.269537] env[65758]: DEBUG nova.compute.manager [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Received event network-changed-c0e6c6d9-40f1-437d-b6c9-3cc445c43b56 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 878.269694] env[65758]: DEBUG nova.compute.manager [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Refreshing instance network info cache due to event network-changed-c0e6c6d9-40f1-437d-b6c9-3cc445c43b56. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 878.269835] env[65758]: DEBUG oslo_concurrency.lockutils [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Acquiring lock "refresh_cache-79c63944-c4c8-4c7c-bc42-3f958d737e66" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.402497] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90522f53-5871-49a7-a15f-175bd456116d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.413271] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f741c27-3a15-4e4e-9953-755518c4336e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.445839] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bee271-2c2c-424b-b57e-f6c68c7e8f1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.458589] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80940dc6-4d99-4660-b2ae-4b38505ccea5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.478606] env[65758]: DEBUG nova.compute.provider_tree [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.510877] env[65758]: WARNING openstack [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 878.511286] env[65758]: WARNING openstack [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 878.551754] env[65758]: DEBUG nova.network.neutron [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 878.567921] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660607, 'name': Rename_Task, 'duration_secs': 0.15858} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.568207] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.568487] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43360d20-f50d-4327-bb91-0f09141bcb52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.580518] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 878.580518] env[65758]: value = "task-4660608" [ 878.580518] env[65758]: _type = "Task" [ 878.580518] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.592326] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.641028] env[65758]: WARNING neutronclient.v2_0.client [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 878.641761] env[65758]: WARNING openstack [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 878.642140] env[65758]: WARNING openstack [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 878.746323] env[65758]: DEBUG nova.network.neutron [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Updating instance_info_cache with network_info: [{"id": "c0e6c6d9-40f1-437d-b6c9-3cc445c43b56", "address": "fa:16:3e:75:c8:ac", "network": {"id": "2497f870-bb6f-4226-bc5a-c182d1b185c0", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-49599873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f688ce091774c3fa77875b0aef79510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0e6c6d9-40", "ovs_interfaceid": "c0e6c6d9-40f1-437d-b6c9-3cc445c43b56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 878.983386] env[65758]: DEBUG nova.scheduler.client.report [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 879.095650] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: 
{'id': task-4660608, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.253606] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Releasing lock "refresh_cache-79c63944-c4c8-4c7c-bc42-3f958d737e66" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.255825] env[65758]: DEBUG nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Instance network_info: |[{"id": "c0e6c6d9-40f1-437d-b6c9-3cc445c43b56", "address": "fa:16:3e:75:c8:ac", "network": {"id": "2497f870-bb6f-4226-bc5a-c182d1b185c0", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-49599873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f688ce091774c3fa77875b0aef79510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0e6c6d9-40", "ovs_interfaceid": "c0e6c6d9-40f1-437d-b6c9-3cc445c43b56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 879.256297] env[65758]: DEBUG oslo_concurrency.lockutils [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Acquired lock "refresh_cache-79c63944-c4c8-4c7c-bc42-3f958d737e66" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.256513] env[65758]: DEBUG nova.network.neutron [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Refreshing network info cache for port c0e6c6d9-40f1-437d-b6c9-3cc445c43b56 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 879.257924] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:c8:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0e6c6d9-40f1-437d-b6c9-3cc445c43b56', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.271134] env[65758]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Creating folder: Project (3f688ce091774c3fa77875b0aef79510). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 879.272477] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3eab15be-f609-449b-9763-b55496c368ef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.290032] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Created folder: Project (3f688ce091774c3fa77875b0aef79510) in parent group-v909763. [ 879.290306] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Creating folder: Instances. Parent ref: group-v909944. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 879.290640] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d9c2f3f-fbcc-4b93-92f2-b208df2af09f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.304049] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Created folder: Instances in parent group-v909944. [ 879.304477] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 879.304716] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 879.304952] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04f4328a-1e89-4435-b39c-3d33c79afb26 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.331583] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.331583] env[65758]: value = "task-4660611" [ 879.331583] env[65758]: _type = "Task" [ 879.331583] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.340840] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660611, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.488971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.261s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.492508] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.114s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.492900] env[65758]: DEBUG nova.objects.instance [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lazy-loading 'resources' on Instance uuid 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.519317] env[65758]: INFO nova.scheduler.client.report [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Deleted allocations for instance b7323030-4573-4af5-a19a-212a140d642a [ 879.596821] env[65758]: DEBUG oslo_vmware.api [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660608, 'name': PowerOnVM_Task, 'duration_secs': 0.584729} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.597156] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.597351] env[65758]: INFO nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Took 8.77 seconds to spawn the instance on the hypervisor. 
[ 879.597523] env[65758]: DEBUG nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 879.598640] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c48423-2481-4a56-9caa-8d6cabd2943e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.773921] env[65758]: WARNING neutronclient.v2_0.client [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 879.775192] env[65758]: WARNING openstack [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 879.775760] env[65758]: WARNING openstack [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 879.843633] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660611, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.986735] env[65758]: WARNING neutronclient.v2_0.client [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 879.987415] env[65758]: WARNING openstack [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 879.987756] env[65758]: WARNING openstack [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 880.028852] env[65758]: DEBUG oslo_concurrency.lockutils [None req-766446e9-d471-4129-82e8-3e868140030c tempest-MultipleCreateTestJSON-325801015 tempest-MultipleCreateTestJSON-325801015-project-member] Lock "b7323030-4573-4af5-a19a-212a140d642a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.621s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.092710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.092856] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.092958] env[65758]: DEBUG nova.compute.manager [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 880.094179] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d3dc31-bab1-4aea-9f68-7e971a329b95 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.103274] env[65758]: DEBUG nova.network.neutron [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Updated VIF entry in instance network info cache for port c0e6c6d9-40f1-437d-b6c9-3cc445c43b56. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 880.103666] env[65758]: DEBUG nova.network.neutron [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Updating instance_info_cache with network_info: [{"id": "c0e6c6d9-40f1-437d-b6c9-3cc445c43b56", "address": "fa:16:3e:75:c8:ac", "network": {"id": "2497f870-bb6f-4226-bc5a-c182d1b185c0", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-49599873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f688ce091774c3fa77875b0aef79510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0e6c6d9-40", "ovs_interfaceid": "c0e6c6d9-40f1-437d-b6c9-3cc445c43b56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 880.106689] env[65758]: DEBUG nova.compute.manager [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 880.107331] env[65758]: DEBUG nova.objects.instance [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lazy-loading 'flavor' on Instance uuid 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.120943] env[65758]: INFO nova.compute.manager [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Took 45.27 seconds to build instance. [ 880.346376] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660611, 'name': CreateVM_Task, 'duration_secs': 0.590095} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.346619] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 880.347037] env[65758]: WARNING neutronclient.v2_0.client [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 880.347403] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.347544] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.347853] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 880.348133] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b27493c-a8bc-4426-8a1b-1eb4890171fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.353367] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 880.353367] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523cce99-bc77-4a47-4bfb-1ff7fd8147a8" [ 880.353367] env[65758]: _type = "Task" [ 880.353367] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.362186] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523cce99-bc77-4a47-4bfb-1ff7fd8147a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.496129] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8ef2e0-5807-4519-86b0-28bc6b53cb06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.505687] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b0241a-f90a-4eca-95a3-a38e1a520080 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.539617] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48773409-ee30-40f9-8b0e-4cb0e28afdfc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.548731] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38764a0d-3d35-469e-8a61-eee66a9f83c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.563364] env[65758]: DEBUG nova.compute.provider_tree [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.612878] env[65758]: DEBUG oslo_concurrency.lockutils [req-28d2e3e7-6f10-4d7e-9605-547028547a3e req-22183211-83d7-41ed-a172-31588ae0315c service nova] Releasing lock "refresh_cache-79c63944-c4c8-4c7c-bc42-3f958d737e66" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.622880] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fd005197-77fc-443c-a573-8b83af9f48e8 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.966s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.865529] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523cce99-bc77-4a47-4bfb-1ff7fd8147a8, 'name': SearchDatastore_Task, 'duration_secs': 0.035179} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.865890] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.866157] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.866388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.866520] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.866693] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.866982] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94beadf1-f1f7-4111-b1db-a5a71a30492b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.880127] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.880415] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.881500] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-facbeb2e-d48e-400a-99d7-d2aee0b7fdad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.889633] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 880.889633] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52858c60-e386-e7c4-f66b-b0e8e05b90fb" [ 880.889633] env[65758]: _type = "Task" [ 880.889633] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.899323] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52858c60-e386-e7c4-f66b-b0e8e05b90fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.069018] env[65758]: DEBUG nova.scheduler.client.report [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 881.117515] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.117850] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-335b3aa8-7d47-41de-8885-fd8976316d09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.128774] env[65758]: DEBUG oslo_vmware.api [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 881.128774] env[65758]: value = "task-4660612" [ 881.128774] env[65758]: _type = "Task" [ 881.128774] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.138493] env[65758]: DEBUG oslo_vmware.api [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660612, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.406694] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52858c60-e386-e7c4-f66b-b0e8e05b90fb, 'name': SearchDatastore_Task, 'duration_secs': 0.012601} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.407586] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91adae10-a721-4bd0-a86b-841c1501580f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.415415] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 881.415415] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522bba7d-99a9-b28b-6809-7847e6288b29" [ 881.415415] env[65758]: _type = "Task" [ 881.415415] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.424790] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522bba7d-99a9-b28b-6809-7847e6288b29, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.573551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.081s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.576358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.824s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.579573] env[65758]: INFO nova.compute.claims [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.606279] env[65758]: INFO nova.scheduler.client.report [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Deleted allocations for instance 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5 [ 881.642361] env[65758]: DEBUG oslo_vmware.api [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660612, 'name': PowerOffVM_Task, 'duration_secs': 0.246815} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.642741] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.642942] env[65758]: DEBUG nova.compute.manager [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 881.643830] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62f2103-9cd0-42c8-9415-89e2afd0c348 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.928381] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522bba7d-99a9-b28b-6809-7847e6288b29, 'name': SearchDatastore_Task, 'duration_secs': 0.014548} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.928690] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.929793] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 79c63944-c4c8-4c7c-bc42-3f958d737e66/79c63944-c4c8-4c7c-bc42-3f958d737e66.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 881.930782] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4289b47f-f334-4ea9-8d31-f3ce35eadff7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.939779] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 881.939779] env[65758]: value = "task-4660613" [ 881.939779] env[65758]: _type = "Task" [ 881.939779] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.954419] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.117861] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c10f39c1-71a9-494b-9810-a2ed9e3a3549 tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "548edde0-9e42-4cd3-bdd3-3615ab9b7fc5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.988s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.161264] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b372c98e-0d80-4f6e-b934-15c72ce5b30b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.068s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.451786] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660613, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.562766] env[65758]: DEBUG nova.compute.manager [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 882.563837] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0b0c99-eb06-4945-9fd7-e03a43b04a48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.924218] env[65758]: DEBUG nova.objects.instance [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lazy-loading 'flavor' on Instance uuid 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 882.951646] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616378} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.955175] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 79c63944-c4c8-4c7c-bc42-3f958d737e66/79c63944-c4c8-4c7c-bc42-3f958d737e66.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 882.955434] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 882.956491] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a38342e1-69d3-4cfd-9e61-a38e8e1ced04 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.966258] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 882.966258] env[65758]: value = "task-4660614" [ 882.966258] env[65758]: _type = "Task" [ 882.966258] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.979506] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660614, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.034837] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "d60aaa5c-913f-4550-a4d5-ab994048da9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.035301] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "d60aaa5c-913f-4550-a4d5-ab994048da9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.035371] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "d60aaa5c-913f-4550-a4d5-ab994048da9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.035588] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "d60aaa5c-913f-4550-a4d5-ab994048da9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.035789] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "d60aaa5c-913f-4550-a4d5-ab994048da9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.042520] env[65758]: INFO nova.compute.manager [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Terminating instance [ 883.077452] env[65758]: INFO nova.compute.manager [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] instance snapshotting [ 883.085984] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35e5901-2a0b-408d-94d3-ff7a9a04e92f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.108648] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d137a4ca-1df9-4a28-b458-f3980c82a0cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.112122] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21541ec7-3004-4488-b4f0-2686e4d2dad3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.122932] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152b1e90-b119-4f77-81d2-c50e2c9e5323 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.167184] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bb87e9-5799-4a6e-9aa9-47ab5e8fe9d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.178033] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a3b13d-ec0c-44ea-9c12-527077b08b73 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.194149] env[65758]: DEBUG nova.compute.provider_tree [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.429388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.429569] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquired lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.429736] env[65758]: DEBUG nova.network.neutron [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 883.429910] env[65758]: DEBUG nova.objects.instance [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lazy-loading 'info_cache' on Instance uuid 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.481328] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086767} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.483299] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 883.483299] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60776cb3-f3ef-4aaa-83a9-24aaaa4f29c7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.507875] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 79c63944-c4c8-4c7c-bc42-3f958d737e66/79c63944-c4c8-4c7c-bc42-3f958d737e66.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.508274] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0c55ce9-262b-429b-a61c-69c01dcd29d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.530949] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 883.530949] env[65758]: value = "task-4660615" [ 883.530949] env[65758]: _type = "Task" [ 883.530949] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.540392] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660615, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.549282] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "refresh_cache-d60aaa5c-913f-4550-a4d5-ab994048da9f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.549397] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquired lock "refresh_cache-d60aaa5c-913f-4550-a4d5-ab994048da9f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.549570] env[65758]: DEBUG nova.network.neutron [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 883.631832] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 883.632769] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f3d64cb4-5341-42fe-b8e0-6ad9f0d0dfc2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.641734] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 883.641734] env[65758]: value = "task-4660616" [ 883.641734] env[65758]: _type = "Task" [ 883.641734] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.650899] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660616, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.698167] env[65758]: DEBUG nova.scheduler.client.report [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.935120] env[65758]: DEBUG nova.objects.base [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Object Instance<3a7d0c08-9de6-47f4-a0c3-871458ccc4e3> lazy-loaded attributes: flavor,info_cache {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 884.045641] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660615, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.052752] env[65758]: WARNING neutronclient.v2_0.client [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 884.053925] env[65758]: WARNING openstack [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 884.054076] env[65758]: WARNING openstack [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 884.098629] env[65758]: DEBUG nova.network.neutron [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 884.155493] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660616, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.179037] env[65758]: DEBUG nova.network.neutron [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 884.203385] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.203786] env[65758]: DEBUG nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 884.206857] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 33.626s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.438271] env[65758]: WARNING neutronclient.v2_0.client [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 884.439089] env[65758]: WARNING openstack [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 884.439445] env[65758]: WARNING openstack [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 884.543741] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660615, 'name': ReconfigVM_Task, 'duration_secs': 0.703757} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.543741] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 79c63944-c4c8-4c7c-bc42-3f958d737e66/79c63944-c4c8-4c7c-bc42-3f958d737e66.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 884.544421] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa4f11fc-112a-4fd6-a937-022ab6167ee4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.552757] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 884.552757] env[65758]: value = "task-4660617" [ 884.552757] env[65758]: _type = "Task" [ 884.552757] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.567216] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660617, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.652479] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660616, 'name': CreateSnapshot_Task, 'duration_secs': 0.876032} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.652771] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 884.653651] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c7bfe6-7bb2-4325-928c-adc76fb1d4b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.680966] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Releasing lock "refresh_cache-d60aaa5c-913f-4550-a4d5-ab994048da9f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.681316] env[65758]: DEBUG nova.compute.manager [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 884.681509] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.682883] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e386c795-1f21-4566-8d66-9e67c1bc30e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.691825] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.692370] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b673876-32b2-4771-8c49-62cfe6aabbd4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.696734] env[65758]: WARNING neutronclient.v2_0.client [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 884.696734] env[65758]: WARNING openstack [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 884.696734] env[65758]: WARNING openstack [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 884.713433] env[65758]: DEBUG nova.compute.utils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 884.715278] env[65758]: DEBUG nova.objects.instance [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lazy-loading 'migration_context' on Instance uuid 105c53ce-e657-4a29-bc7f-96b4f885707a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 884.716454] env[65758]: DEBUG oslo_vmware.api [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 884.716454] env[65758]: value = "task-4660618" [ 884.716454] 
env[65758]: _type = "Task" [ 884.716454] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.717063] env[65758]: DEBUG nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 884.717268] env[65758]: DEBUG nova.network.neutron [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 884.717587] env[65758]: WARNING neutronclient.v2_0.client [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 884.717890] env[65758]: WARNING neutronclient.v2_0.client [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 884.718534] env[65758]: WARNING openstack [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 884.718881] env[65758]: WARNING openstack [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 884.739406] env[65758]: DEBUG oslo_vmware.api [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660618, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.778998] env[65758]: DEBUG nova.policy [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3338c19613c041abb681fa6cc661652a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e114eef3998848699a9a086fee86db29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 884.850044] env[65758]: DEBUG nova.network.neutron [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Updating instance_info_cache with network_info: [{"id": "31402f5e-3e8a-4ff8-a2b3-4b5992fb142a", "address": "fa:16:3e:ec:b2:6e", "network": {"id": "b8020aea-ddd1-4c96-b0aa-7114e6e3af1d", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1206359833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed522365ca465f90708212bdb65510", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31402f5e-3e", "ovs_interfaceid": "31402f5e-3e8a-4ff8-a2b3-4b5992fb142a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 885.064466] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660617, 'name': Rename_Task, 'duration_secs': 0.19742} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.064466] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.064975] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7aa02a50-a06e-41a2-957c-bdfd7cf63e16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.073031] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 885.073031] env[65758]: value = "task-4660619" [ 885.073031] env[65758]: _type = "Task" [ 885.073031] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.082459] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660619, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.152055] env[65758]: DEBUG nova.network.neutron [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Successfully created port: f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 885.175612] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 885.175961] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-23e4a84b-baa2-4ac9-bcc0-77d77b248edf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.187395] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 885.187395] env[65758]: value = "task-4660620" [ 885.187395] env[65758]: _type = "Task" [ 885.187395] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.198501] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660620, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.217497] env[65758]: DEBUG nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 885.243585] env[65758]: DEBUG oslo_vmware.api [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660618, 'name': PowerOffVM_Task, 'duration_secs': 0.181238} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.243911] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.244071] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.244361] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47f960fe-98b4-48dd-a670-03ed07b2f4bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.286071] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.288021] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.288021] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Deleting the datastore file [datastore1] d60aaa5c-913f-4550-a4d5-ab994048da9f {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.288021] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-874b0683-16d0-4a38-b51d-73e5ecb73de7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.296182] env[65758]: DEBUG oslo_vmware.api [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for the task: (returnval){ [ 885.296182] env[65758]: value = "task-4660622" [ 885.296182] env[65758]: _type = "Task" [ 885.296182] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.312047] env[65758]: DEBUG oslo_vmware.api [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.354125] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Releasing lock "refresh_cache-3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.588696] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660619, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.708211] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660620, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.814444] env[65758]: DEBUG oslo_vmware.api [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Task: {'id': task-4660622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195372} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.814789] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.815063] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.815398] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.815661] env[65758]: INFO nova.compute.manager [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 885.816246] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 885.816246] env[65758]: DEBUG nova.compute.manager [-] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 885.816448] env[65758]: DEBUG nova.network.neutron [-] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 885.816742] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 885.817657] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 885.818105] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 885.860817] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d032bd-885a-403d-bb00-e2c23f5db173 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.872507] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bebbf5a-a1f1-4bed-a946-a565a523efd9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.877055] env[65758]: DEBUG nova.network.neutron [-] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 885.877055] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 885.909393] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8912b9d-7808-4d23-87bb-c06c07acfc64 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.918963] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4653752-a526-476b-b837-f48dfb8105f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.937509] env[65758]: DEBUG nova.compute.provider_tree [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.084596] env[65758]: DEBUG oslo_vmware.api [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660619, 'name': PowerOnVM_Task, 'duration_secs': 0.698923} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.085112] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.085112] env[65758]: INFO nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Took 9.15 seconds to spawn the instance on the hypervisor. [ 886.085285] env[65758]: DEBUG nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 886.086117] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2001dd-9996-48dd-bfaa-c98e85661bb0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.199439] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660620, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.230949] env[65758]: DEBUG nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 886.259805] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 886.260124] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 886.260394] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 886.260487] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 886.260696] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 886.260878] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 886.261196] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 886.261401] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 886.261578] env[65758]: DEBUG 
nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 886.261740] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 886.261951] env[65758]: DEBUG nova.virt.hardware [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 886.262962] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd46dc11-70d0-4fd4-a07f-858ca10856cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.272282] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d8da1e-5697-412c-901b-27b28c207c97 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.365300] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.365656] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6083878c-adcf-45b3-9490-ded810ad4278 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.375738] env[65758]: DEBUG oslo_vmware.api [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 886.375738] env[65758]: value = "task-4660623" [ 886.375738] env[65758]: _type = "Task" [ 886.375738] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.379151] env[65758]: DEBUG nova.network.neutron [-] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 886.389060] env[65758]: DEBUG oslo_vmware.api [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660623, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.441666] env[65758]: DEBUG nova.scheduler.client.report [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 886.608725] env[65758]: INFO nova.compute.manager [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Took 45.32 seconds to build instance. [ 886.700371] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ec474-4cce-510b-dea2-30659a2423f6/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 886.701868] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2338bcd6-8554-4994-a57e-d573088a27d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.713132] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660620, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.715499] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ec474-4cce-510b-dea2-30659a2423f6/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 886.715499] env[65758]: ERROR oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ec474-4cce-510b-dea2-30659a2423f6/disk-0.vmdk due to incomplete transfer. 
[ 886.715749] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-53e72ca1-b68c-4d96-8cfc-ec7d2540b201 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.726714] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ec474-4cce-510b-dea2-30659a2423f6/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 886.727092] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Uploaded image 9bca55e5-7256-4973-9126-580769839e32 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 886.730527] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 886.730527] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a1a7c256-93fd-42d4-829e-7380d783fc1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.739539] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 886.739539] env[65758]: value = "task-4660624" [ 886.739539] env[65758]: _type = "Task" [ 886.739539] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.749963] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660624, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.813143] env[65758]: DEBUG nova.network.neutron [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Successfully updated port: f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 886.882552] env[65758]: INFO nova.compute.manager [-] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Took 1.07 seconds to deallocate network for instance. [ 886.887949] env[65758]: DEBUG oslo_vmware.api [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660623, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.893522] env[65758]: DEBUG nova.compute.manager [req-bf364d17-19b4-4c99-a098-cf5200e37ea6 req-8035bd79-586f-4862-b874-80e5bf011f78 service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Received event network-vif-plugged-f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 886.893765] env[65758]: DEBUG oslo_concurrency.lockutils [req-bf364d17-19b4-4c99-a098-cf5200e37ea6 req-8035bd79-586f-4862-b874-80e5bf011f78 service nova] Acquiring lock "df46c28d-7cbd-490e-8db2-9730e4d9f953-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.893962] env[65758]: DEBUG oslo_concurrency.lockutils [req-bf364d17-19b4-4c99-a098-cf5200e37ea6 req-8035bd79-586f-4862-b874-80e5bf011f78 service nova] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.894182] env[65758]: DEBUG oslo_concurrency.lockutils [req-bf364d17-19b4-4c99-a098-cf5200e37ea6 req-8035bd79-586f-4862-b874-80e5bf011f78 service nova] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.894353] env[65758]: DEBUG nova.compute.manager [req-bf364d17-19b4-4c99-a098-cf5200e37ea6 req-8035bd79-586f-4862-b874-80e5bf011f78 service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] No waiting events found dispatching network-vif-plugged-f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 886.894572] env[65758]: WARNING nova.compute.manager [req-bf364d17-19b4-4c99-a098-cf5200e37ea6 req-8035bd79-586f-4862-b874-80e5bf011f78 service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Received unexpected event network-vif-plugged-f597ff54-9371-4703-893c-3b7ad96d394d for instance with vm_state building and task_state spawning. [ 887.112886] env[65758]: DEBUG oslo_concurrency.lockutils [None req-755a6986-1a91-41a1-aa27-a56e1f96eae3 tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "79c63944-c4c8-4c7c-bc42-3f958d737e66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.500s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.202900] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660620, 'name': CloneVM_Task, 'duration_secs': 1.960485} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.204088] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Created linked-clone VM from snapshot [ 887.204187] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062413ed-6d01-4dfb-993e-07a66e8a7870 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.213270] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Uploading image d5908753-56d9-4732-af98-6ebe71ff0db2 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 887.225358] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 887.226029] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ba0178aa-6f52-49f7-97a7-98c20b48e403 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.236528] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 887.236528] env[65758]: value = "task-4660625" [ 887.236528] env[65758]: _type = "Task" [ 887.236528] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.252368] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660625, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.256376] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660624, 'name': Destroy_Task} progress is 33%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.318057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.318057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.318057] env[65758]: DEBUG nova.network.neutron [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 887.387846] env[65758]: DEBUG oslo_vmware.api [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660623, 'name': PowerOnVM_Task, 'duration_secs': 0.903778} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.388158] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.388363] env[65758]: DEBUG nova.compute.manager [None req-a3d4efda-99ab-4394-89fd-3b0aa66fa71b tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 887.389215] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a960149b-18b6-4185-93ce-73fb3c6dc241 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.397207] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.454821] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.248s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.461422] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.259s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.463101] env[65758]: INFO nova.compute.claims [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.617631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquiring lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.618362] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.618362] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquiring lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.618362] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.618571] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.621251] env[65758]: INFO nova.compute.manager [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Terminating instance [ 887.752099] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660625, 'name': Destroy_Task} progress is 33%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.755626] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660624, 'name': Destroy_Task, 'duration_secs': 0.676255} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.755914] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Destroyed the VM [ 887.756175] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 887.756451] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-546ec5e0-b46a-4205-b5bd-fe432be41809 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.765136] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 887.765136] env[65758]: value = "task-4660626" [ 887.765136] env[65758]: _type = "Task" [ 887.765136] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.777516] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660626, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.821332] env[65758]: WARNING openstack [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 887.821605] env[65758]: WARNING openstack [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 887.865954] env[65758]: DEBUG nova.network.neutron [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 887.956257] env[65758]: WARNING neutronclient.v2_0.client [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 887.956983] env[65758]: WARNING openstack [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 887.957330] env[65758]: WARNING openstack [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 888.061540] env[65758]: DEBUG nova.network.neutron [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updating instance_info_cache with network_info: [{"id": "f597ff54-9371-4703-893c-3b7ad96d394d", "address": "fa:16:3e:cc:36:a7", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf597ff54-93", "ovs_interfaceid": "f597ff54-9371-4703-893c-3b7ad96d394d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 888.114945] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquiring lock "79c63944-c4c8-4c7c-bc42-3f958d737e66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.115375] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a 
tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "79c63944-c4c8-4c7c-bc42-3f958d737e66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.115605] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquiring lock "79c63944-c4c8-4c7c-bc42-3f958d737e66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.115785] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "79c63944-c4c8-4c7c-bc42-3f958d737e66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.116016] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "79c63944-c4c8-4c7c-bc42-3f958d737e66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.118249] env[65758]: INFO nova.compute.manager [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Terminating instance [ 888.125961] env[65758]: DEBUG nova.compute.manager [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 888.125961] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.126954] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9303dc2-0473-43ce-85c3-8d3de4bfa4d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.136361] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.136680] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d276896d-c9ed-48c0-989a-07968bb12b5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.145135] env[65758]: DEBUG oslo_vmware.api [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 888.145135] env[65758]: value = "task-4660627" [ 888.145135] env[65758]: _type = "Task" [ 888.145135] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.155145] env[65758]: DEBUG oslo_vmware.api [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660627, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.253292] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660625, 'name': Destroy_Task, 'duration_secs': 0.667404} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.253783] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Destroyed the VM [ 888.253888] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 888.254243] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3fc7f9ee-40a3-4d4d-a77d-e6727f2e2e48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.265497] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 888.265497] env[65758]: value = "task-4660628" [ 888.265497] env[65758]: _type = "Task" [ 888.265497] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.280216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "96103549-80a5-462d-9f73-f5f6363ab9fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.280216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "96103549-80a5-462d-9f73-f5f6363ab9fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.280216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "96103549-80a5-462d-9f73-f5f6363ab9fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.280216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "96103549-80a5-462d-9f73-f5f6363ab9fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.280611] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock 
"96103549-80a5-462d-9f73-f5f6363ab9fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.290204] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660626, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.290574] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660628, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.291267] env[65758]: INFO nova.compute.manager [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Terminating instance [ 888.564589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.565166] env[65758]: DEBUG nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Instance network_info: |[{"id": "f597ff54-9371-4703-893c-3b7ad96d394d", "address": "fa:16:3e:cc:36:a7", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf597ff54-93", "ovs_interfaceid": "f597ff54-9371-4703-893c-3b7ad96d394d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 888.568792] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:36:a7', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f597ff54-9371-4703-893c-3b7ad96d394d', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.580681] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 888.585221] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.585806] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e89135f-cc7e-4cff-9a96-7cce63824da3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.622187] env[65758]: DEBUG nova.compute.manager [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 888.622548] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.629489] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd08dc31-182c-4792-9a04-3a34600b5b61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.633395] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.633395] env[65758]: value = "task-4660629" [ 888.633395] env[65758]: _type = "Task" [ 888.633395] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.647492] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.651926] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dacb4623-434c-4e8a-a487-cd6ef4602623 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.660528] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660629, 'name': CreateVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.671940] env[65758]: DEBUG oslo_vmware.api [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660627, 'name': PowerOffVM_Task, 'duration_secs': 0.322116} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.674259] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.674452] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 888.674822] env[65758]: DEBUG oslo_vmware.api [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 888.674822] env[65758]: value = "task-4660630" [ 888.674822] env[65758]: _type = "Task" [ 888.674822] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.681049] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8bb2c37-7a1f-45dd-a3c9-209967a7a0ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.691771] env[65758]: DEBUG oslo_vmware.api [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660630, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.778888] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.778888] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.779126] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Deleting the datastore file [datastore2] 47bb5b02-4f84-468e-ad46-2c1c96b65c97 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.779838] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b30f28b-2d97-4b6a-b61e-ddcc26742b65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.791048] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660628, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.796379] env[65758]: DEBUG oslo_vmware.api [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for the task: (returnval){ [ 888.796379] env[65758]: value = "task-4660632" [ 888.796379] env[65758]: _type = "Task" [ 888.796379] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.796729] env[65758]: DEBUG oslo_vmware.api [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660626, 'name': RemoveSnapshot_Task, 'duration_secs': 0.890279} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.797904] env[65758]: DEBUG nova.compute.manager [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 888.797904] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.800687] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 888.800968] env[65758]: INFO nova.compute.manager [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Took 17.53 seconds to snapshot the instance on the hypervisor. [ 888.804217] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59916ef-5f08-41c8-adba-df11e92d5906 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.825346] env[65758]: DEBUG oslo_vmware.api [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660632, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.827065] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 888.830152] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f26673a2-4588-44d9-ba8d-b1736507a4ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.934736] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.934956] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.935234] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleting the datastore file [datastore1] 96103549-80a5-462d-9f73-f5f6363ab9fc {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.935919] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1008cf9-a8b6-431f-8bec-ca59a4b0f91a {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.946725] env[65758]: DEBUG oslo_vmware.api [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 888.946725] env[65758]: value = "task-4660634" [ 888.946725] env[65758]: _type = "Task" [ 888.946725] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.960804] env[65758]: DEBUG oslo_vmware.api [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660634, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.019821] env[65758]: INFO nova.compute.manager [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Swapping old allocation on dict_keys(['0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51']) held by migration 37e0bba4-7690-4c4c-9e66-0b8b93f50a0f for instance [ 889.067532] env[65758]: DEBUG nova.scheduler.client.report [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Overwriting current allocation {'allocations': {'0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 90}}, 'project_id': 'cdaabf2897064b5a948dbdb6d5921d76', 'user_id': '8f16c6fa73284e8696df370f862e6366', 'consumer_generation': 1} on consumer 105c53ce-e657-4a29-bc7f-96b4f885707a {{(pid=65758) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 889.150377] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660629, 'name': CreateVM_Task, 'duration_secs': 0.429507} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.150755] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.151343] env[65758]: WARNING neutronclient.v2_0.client [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 889.151741] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.151950] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.152556] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 889.152681] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbd3fff2-75e0-4a25-82f2-90839a2bae4f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.156133] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 889.163810] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 889.163810] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5289fc53-2e53-25a3-9394-a99c37aa5e6e" [ 889.163810] env[65758]: _type = "Task" [ 889.163810] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.176453] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5289fc53-2e53-25a3-9394-a99c37aa5e6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.179133] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc8e5bc-0806-4785-a3b3-f79e32cee93f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.196661] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c0fd47-fcca-47e8-b75c-3a2e1458fef4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.199490] env[65758]: DEBUG oslo_vmware.api [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660630, 'name': PowerOffVM_Task, 'duration_secs': 0.281452} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.199819] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 889.199980] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 889.200680] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c019bbe-7102-46b4-9163-a4517fc3b4c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.204551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.204721] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquired lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.204913] env[65758]: DEBUG nova.network.neutron [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 889.239129] env[65758]: DEBUG nova.compute.manager [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Received event network-changed-f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} 
[ 889.239505] env[65758]: DEBUG nova.compute.manager [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Refreshing instance network info cache due to event network-changed-f597ff54-9371-4703-893c-3b7ad96d394d. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 889.239858] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] Acquiring lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.240129] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] Acquired lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.240378] env[65758]: DEBUG nova.network.neutron [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Refreshing network info cache for port f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 889.244294] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60a8cf1-8b00-4484-8063-2cb32297a8e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.256103] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b4a487-6a74-476e-b3df-dcc21d30a8da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.272653] env[65758]: DEBUG nova.compute.provider_tree [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.288432] env[65758]: DEBUG oslo_vmware.api [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660628, 'name': RemoveSnapshot_Task, 'duration_secs': 0.897827} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.291108] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 889.291373] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 889.291677] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Deleting the datastore file [datastore1] 79c63944-c4c8-4c7c-bc42-3f958d737e66 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 889.292136] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 889.296242] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42fa8df1-0805-4ba0-9032-0aa9abf586db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.308531] env[65758]: DEBUG oslo_vmware.api [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for the task: (returnval){ [ 889.308531] env[65758]: value = "task-4660636" [ 889.308531] env[65758]: _type = "Task" [ 889.308531] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.310241] env[65758]: DEBUG nova.compute.manager [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Instance disappeared during snapshot {{(pid=65758) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4635}} [ 889.324932] env[65758]: DEBUG oslo_vmware.api [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Task: {'id': task-4660632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262676} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.327025] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.327025] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.327025] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.327345] env[65758]: INFO nova.compute.manager [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Took 1.20 seconds to destroy the instance on the hypervisor. [ 889.327689] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 889.328452] env[65758]: DEBUG nova.compute.manager [-] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 889.328619] env[65758]: DEBUG nova.network.neutron [-] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 889.328945] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 889.330063] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 889.330063] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 889.342411] env[65758]: DEBUG nova.compute.manager [None req-1b6229e1-503e-41ce-93aa-d76cf3a64a5a tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image not found during clean up 9bca55e5-7256-4973-9126-580769839e32 {{(pid=65758) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4641}} [ 889.344524] env[65758]: DEBUG oslo_vmware.api [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660636, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.377305] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 889.466579] env[65758]: DEBUG oslo_vmware.api [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660634, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236683} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.466961] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.467239] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.467495] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.467737] env[65758]: INFO nova.compute.manager [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Took 0.67 seconds to destroy the instance on the hypervisor. 
[ 889.468115] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 889.469028] env[65758]: DEBUG nova.compute.manager [-] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 889.469028] env[65758]: DEBUG nova.network.neutron [-] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 889.469028] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 889.471530] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 889.471530] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 889.524345] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "1ff48e58-9240-466d-bec4-51394e550c34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.524584] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "1ff48e58-9240-466d-bec4-51394e550c34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.526564] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 889.676867] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5289fc53-2e53-25a3-9394-a99c37aa5e6e, 'name': SearchDatastore_Task, 'duration_secs': 0.012341} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.677202] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.677433] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 889.677736] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.677814] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.677966] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.678255] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-691e25a3-42fb-414c-8d03-4e02a24bd683 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.688434] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.688434] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 889.689331] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c99d9f3-4938-433d-bb44-efdb85412ce0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.695817] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 889.695817] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d94734-eb22-ca22-4560-4866d62d9871" [ 889.695817] env[65758]: _type = "Task" [ 889.695817] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.705626] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d94734-eb22-ca22-4560-4866d62d9871, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.744439] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 889.745703] env[65758]: WARNING openstack [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 889.745821] env[65758]: WARNING openstack [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 889.754276] env[65758]: WARNING neutronclient.v2_0.client [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 889.754921] env[65758]: WARNING openstack [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 889.755337] env[65758]: WARNING openstack [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 889.779967] env[65758]: DEBUG nova.scheduler.client.report [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.801898] env[65758]: WARNING nova.compute.manager [None req-0dadaeb3-534f-4451-a428-34dd1b09efcb tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Image not found during snapshot: nova.exception.ImageNotFound: Image d5908753-56d9-4732-af98-6ebe71ff0db2 could not be found. [ 889.821234] env[65758]: DEBUG oslo_vmware.api [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Task: {'id': task-4660636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276269} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.821499] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.821678] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.821853] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.822040] env[65758]: INFO nova.compute.manager [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Took 1.20 seconds to destroy the instance on the hypervisor. [ 889.822286] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 889.822794] env[65758]: DEBUG nova.compute.manager [-] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 889.822853] env[65758]: DEBUG nova.network.neutron [-] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 889.823136] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 889.823665] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 889.823972] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 889.946227] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 890.030305] env[65758]: DEBUG nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 890.189607] env[65758]: WARNING neutronclient.v2_0.client [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 890.190287] env[65758]: WARNING openstack [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 890.190725] env[65758]: WARNING openstack [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 890.209353] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d94734-eb22-ca22-4560-4866d62d9871, 'name': SearchDatastore_Task, 'duration_secs': 0.010227} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.210526] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a88051da-e1c8-4223-b169-4c9cc37499b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.217515] env[65758]: WARNING neutronclient.v2_0.client [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 890.219216] env[65758]: WARNING openstack [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 890.219710] env[65758]: WARNING openstack [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 890.231209] env[65758]: DEBUG nova.network.neutron [-] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 890.232334] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 890.232334] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529ee020-1077-fce8-4490-7a082e3a4e8d" [ 890.232334] env[65758]: _type = "Task" [ 890.232334] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.242680] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529ee020-1077-fce8-4490-7a082e3a4e8d, 'name': SearchDatastore_Task, 'duration_secs': 0.010791} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.242988] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.243268] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] df46c28d-7cbd-490e-8db2-9730e4d9f953/df46c28d-7cbd-490e-8db2-9730e4d9f953.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 890.243562] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80d32957-cef4-495b-b85b-9d5f58f1bbd8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.252667] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 890.252667] env[65758]: value = "task-4660637" [ 890.252667] env[65758]: _type = "Task" [ 890.252667] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.267477] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660637, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.281242] env[65758]: DEBUG nova.compute.manager [req-b5bf0a51-b250-4802-97a1-ac36e8539f91 req-42b03a00-2853-4d4b-bf40-676109aaf877 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Received event network-vif-deleted-1b1c3792-b109-4ead-81ff-2d275ce2dbc7 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 890.281526] env[65758]: INFO nova.compute.manager [req-b5bf0a51-b250-4802-97a1-ac36e8539f91 req-42b03a00-2853-4d4b-bf40-676109aaf877 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Neutron deleted interface 1b1c3792-b109-4ead-81ff-2d275ce2dbc7; detaching it from the instance and deleting it from the info cache [ 890.281711] env[65758]: DEBUG nova.network.neutron [req-b5bf0a51-b250-4802-97a1-ac36e8539f91 req-42b03a00-2853-4d4b-bf40-676109aaf877 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 890.289457] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.825s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.289457] env[65758]: DEBUG nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 890.294564] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.195s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.296291] env[65758]: INFO nova.compute.claims [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.349627] env[65758]: DEBUG nova.network.neutron [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance_info_cache with network_info: [{"id": "ea073371-1ad8-47ae-9cca-67a419a8e219", "address": "fa:16:3e:e4:10:d3", "network": {"id": "0edb721a-a268-4cd1-9bd0-0b2d1ec4d6f6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8a035a70e9c44ef7876c682f0ee3c231", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea073371-1a", "ovs_interfaceid": "ea073371-1ad8-47ae-9cca-67a419a8e219", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 890.353113] env[65758]: DEBUG nova.network.neutron [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updated VIF entry in instance network info cache for port f597ff54-9371-4703-893c-3b7ad96d394d. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 890.353518] env[65758]: DEBUG nova.network.neutron [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updating instance_info_cache with network_info: [{"id": "f597ff54-9371-4703-893c-3b7ad96d394d", "address": "fa:16:3e:cc:36:a7", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf597ff54-93", "ovs_interfaceid": "f597ff54-9371-4703-893c-3b7ad96d394d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 890.367020] env[65758]: DEBUG nova.network.neutron [-] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 890.560809] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.735009] env[65758]: INFO nova.compute.manager [-] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Took 1.27 seconds to deallocate network for instance. [ 890.755964] env[65758]: DEBUG nova.network.neutron [-] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 890.767446] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660637, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513511} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.768157] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] df46c28d-7cbd-490e-8db2-9730e4d9f953/df46c28d-7cbd-490e-8db2-9730e4d9f953.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.768157] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.768292] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfb0d8a7-77d5-421a-bf38-ac775f48fed4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.776707] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 890.776707] env[65758]: value = "task-4660638" [ 890.776707] env[65758]: _type = "Task" [ 890.776707] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.786203] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660638, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.790418] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1801e75b-ec21-491c-98ee-3570953189d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.796282] env[65758]: DEBUG nova.compute.utils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 890.799060] env[65758]: DEBUG nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 890.799177] env[65758]: DEBUG nova.network.neutron [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 890.799512] env[65758]: WARNING neutronclient.v2_0.client [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 890.799827] env[65758]: WARNING neutronclient.v2_0.client [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 890.800876] env[65758]: WARNING openstack [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 890.800876] env[65758]: WARNING openstack [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 890.809679] env[65758]: DEBUG nova.compute.utils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 890.815920] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc2728a-ce01-45d2-adaa-fd24b1a84fe1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.830541] env[65758]: DEBUG nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 890.860441] env[65758]: DEBUG oslo_concurrency.lockutils [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Releasing lock "refresh_cache-105c53ce-e657-4a29-bc7f-96b4f885707a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.860905] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.861357] env[65758]: DEBUG oslo_concurrency.lockutils [req-0ff1b983-4cad-42eb-a9a9-ea958b193889 req-d451b79a-e83e-44a3-b388-73bb3c7fc678 service nova] Releasing lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.861774] env[65758]: DEBUG nova.compute.manager [req-b5bf0a51-b250-4802-97a1-ac36e8539f91 req-42b03a00-2853-4d4b-bf40-676109aaf877 service nova] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Detach interface failed, port_id=1b1c3792-b109-4ead-81ff-2d275ce2dbc7, reason: Instance 47bb5b02-4f84-468e-ad46-2c1c96b65c97 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 890.864652] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ac521d0-95d2-4aec-aa0f-999115957625 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.868959] env[65758]: INFO nova.compute.manager [-] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Took 1.54 seconds to deallocate network for instance. [ 890.875082] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 890.875082] env[65758]: value = "task-4660639" [ 890.875082] env[65758]: _type = "Task" [ 890.875082] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.892064] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660639, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.902054] env[65758]: DEBUG nova.policy [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8dcbde9f217e4ebd847282da61e502ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45aad313d10447e9ba61ed0a05b915ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 891.227273] env[65758]: DEBUG nova.network.neutron [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Successfully created port: 25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 891.248414] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.262434] env[65758]: INFO nova.compute.manager [-] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Took 1.44 seconds to deallocate network for instance. [ 891.291329] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072772} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.291799] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.292688] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4e2fd3-191e-4938-8200-b8464fb173ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.326220] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] df46c28d-7cbd-490e-8db2-9730e4d9f953/df46c28d-7cbd-490e-8db2-9730e4d9f953.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.329083] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cb49ff1-08e2-40a5-93d4-326edf135e71 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.352143] env[65758]: DEBUG nova.compute.manager [req-04cecbed-4117-4dd8-a017-6ec1f5ebe9a3 req-bf46b885-1205-4dd9-8749-c9ef9827306c service nova] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Received event network-vif-deleted-a555e91f-164f-4b04-83dd-828041132dcc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 891.352435] env[65758]: DEBUG nova.compute.manager [req-04cecbed-4117-4dd8-a017-6ec1f5ebe9a3 req-bf46b885-1205-4dd9-8749-c9ef9827306c service nova] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Received event network-vif-deleted-c0e6c6d9-40f1-437d-b6c9-3cc445c43b56 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 891.359890] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 891.359890] env[65758]: value = "task-4660640" [ 891.359890] env[65758]: _type = "Task" [ 891.359890] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.375305] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660640, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.376368] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.386311] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660639, 'name': PowerOffVM_Task, 'duration_secs': 0.508274} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.389320] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.390011] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:16:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='346c523a-8d39-4f4e-a2d8-eb4e1ab4f9a4',id=28,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1141065059',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 891.390224] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.390372] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 891.390549] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.390680] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 891.390878] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 
tempest-MigrationsAdminTest-1268559466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 891.391111] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.391238] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 891.391430] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 891.391596] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 891.391768] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 891.398648] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fc11f7b-1aaa-43cf-b33a-5985ed333cd0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.415432] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1886e7fc-9eeb-423c-bbb0-44b21b661369 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.420072] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 891.420072] env[65758]: value = "task-4660641" [ 891.420072] env[65758]: _type = "Task" [ 891.420072] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.426912] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a4c08b-c423-4f9a-9cbd-18f574567467 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.433888] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660641, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.466231] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3f5039-71a5-456f-ae0f-ecf3c1d07d1a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.475326] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019c9e7d-5118-4396-afbb-770da72dbb85 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.491711] env[65758]: DEBUG nova.compute.provider_tree [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.572600] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.573364] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.573364] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.573714] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.573714] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.576943] env[65758]: INFO nova.compute.manager [None req-0fe1161b-e587-4bef-ac61-3510604c2559 
tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Terminating instance [ 891.773429] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.855168] env[65758]: DEBUG nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 891.871062] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660640, 'name': ReconfigVM_Task, 'duration_secs': 0.301456} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.871382] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfigured VM instance instance-00000045 to attach disk [datastore1] df46c28d-7cbd-490e-8db2-9730e4d9f953/df46c28d-7cbd-490e-8db2-9730e4d9f953.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.872057] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6462f13-7dd0-44a2-bb07-bb4f425d298b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.880627] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 891.880627] env[65758]: value = "task-4660642" [ 891.880627] env[65758]: _type = "Task" [ 891.880627] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.883126] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:14:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1430008269',id=18,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-969015804',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 891.883369] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.883518] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 891.883692] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.883837] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 891.883988] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 891.884211] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.884366] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 
tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 891.884630] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 891.884681] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 891.884833] env[65758]: DEBUG nova.virt.hardware [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 891.886330] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358a9476-f483-4b41-8e29-d0dc715930a8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.899503] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660642, 'name': Rename_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.903068] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672ea5d0-875f-4249-a5ea-0feef1402f83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.930786] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660641, 'name': ReconfigVM_Task, 'duration_secs': 0.375815} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.931802] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f8a9b6-4429-4ba5-a6ef-0998dd346f31 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.952699] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:16:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='346c523a-8d39-4f4e-a2d8-eb4e1ab4f9a4',id=28,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1141065059',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 891.952995] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.953187] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 891.953420] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.953599] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 891.953773] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 891.954049] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.954235] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 891.954404] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 891.954566] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 891.954738] env[65758]: DEBUG nova.virt.hardware [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 891.955614] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b29313d-4ba4-429a-bc90-664256ca2212 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.962568] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 891.962568] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245aa9d-0a66-8215-1437-26852b613762" [ 891.962568] env[65758]: _type = "Task" [ 891.962568] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.975743] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5245aa9d-0a66-8215-1437-26852b613762, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.996533] env[65758]: DEBUG nova.scheduler.client.report [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 892.081418] env[65758]: DEBUG nova.compute.manager [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 892.081655] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.084829] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abe7f10-c938-454a-a469-d4d87a886e90 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.093303] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.093303] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fee2283b-5060-4514-9dce-aaace1c4d258 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.100882] env[65758]: DEBUG oslo_vmware.api [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 892.100882] env[65758]: value = "task-4660643" [ 892.100882] env[65758]: _type = "Task" [ 892.100882] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.110210] env[65758]: DEBUG oslo_vmware.api [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.395816] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660642, 'name': Rename_Task, 'duration_secs': 0.167373} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.396156] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.396400] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7528e267-8698-4519-b868-ec34f4a3ca35 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.404285] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 892.404285] env[65758]: value = "task-4660644" [ 892.404285] env[65758]: _type = "Task" [ 892.404285] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.413394] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660644, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.473502] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5245aa9d-0a66-8215-1437-26852b613762, 'name': SearchDatastore_Task, 'duration_secs': 0.011289} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.479156] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 892.479548] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1edaa1a1-fee8-4b40-88b8-3de42b30880d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.499979] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 892.499979] env[65758]: value = "task-4660645" [ 892.499979] env[65758]: _type = "Task" [ 892.499979] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.504353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.504996] env[65758]: DEBUG nova.compute.manager [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 892.508644] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.826s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.510274] env[65758]: INFO nova.compute.claims [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.520554] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660645, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.612203] env[65758]: DEBUG oslo_vmware.api [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660643, 'name': PowerOffVM_Task, 'duration_secs': 0.18037} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.612507] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 892.612661] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.612998] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8483e21-e21b-4458-81d5-e1a373fe7b55 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.691330] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 892.691330] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 892.691433] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleting the datastore file [datastore2] a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.693170] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65f6c3d1-8dbe-4211-91e5-55f7fb7ca2e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.700932] env[65758]: DEBUG oslo_vmware.api [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 892.700932] env[65758]: value = "task-4660647" [ 892.700932] env[65758]: _type = "Task" [ 892.700932] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.711186] env[65758]: DEBUG oslo_vmware.api [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660647, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.914720] env[65758]: DEBUG oslo_vmware.api [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660644, 'name': PowerOnVM_Task, 'duration_secs': 0.490359} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.915119] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 892.915369] env[65758]: INFO nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Took 6.68 seconds to spawn the instance on the hypervisor. [ 892.915615] env[65758]: DEBUG nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 892.916597] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4986b7b2-52ef-43a9-a17b-e9d408b9b7bc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.013491] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660645, 'name': ReconfigVM_Task, 'duration_secs': 0.243164} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.015463] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 893.015463] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9125f0-b7b8-4a99-b8e6-d9443b9f7c4f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.018691] env[65758]: DEBUG nova.compute.utils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 893.026022] env[65758]: DEBUG nova.compute.manager [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 893.026022] env[65758]: DEBUG nova.network.neutron [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 893.026022] env[65758]: WARNING neutronclient.v2_0.client [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 893.026022] env[65758]: WARNING neutronclient.v2_0.client [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 893.026022] env[65758]: WARNING openstack [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 893.026022] env[65758]: WARNING openstack [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 893.055754] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a/105c53ce-e657-4a29-bc7f-96b4f885707a.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 893.056705] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be95c790-c0b1-4ebb-bf18-539d700be901 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.077168] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 893.077168] env[65758]: value = "task-4660648" [ 893.077168] env[65758]: _type = "Task" [ 893.077168] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.085835] env[65758]: DEBUG nova.policy [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 893.091464] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660648, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.183403] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "31816c0c-d7d2-48db-9a87-a1e03c938a60" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.183685] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "31816c0c-d7d2-48db-9a87-a1e03c938a60" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.183931] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "31816c0c-d7d2-48db-9a87-a1e03c938a60-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.184161] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "31816c0c-d7d2-48db-9a87-a1e03c938a60-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.184317] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "31816c0c-d7d2-48db-9a87-a1e03c938a60-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.186705] 
env[65758]: INFO nova.compute.manager [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Terminating instance [ 893.212010] env[65758]: DEBUG oslo_vmware.api [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180889} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.212308] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.212661] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.212957] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.213189] env[65758]: INFO nova.compute.manager [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 893.213479] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 893.213763] env[65758]: DEBUG nova.compute.manager [-] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 893.213874] env[65758]: DEBUG nova.network.neutron [-] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 893.214188] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 893.214808] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 893.215156] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 893.268232] env[65758]: DEBUG nova.network.neutron [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Successfully updated port: 25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 893.351832] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 893.439602] env[65758]: INFO nova.compute.manager [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Took 46.71 seconds to build instance. [ 893.480295] env[65758]: DEBUG nova.network.neutron [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Successfully created port: cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 893.525328] env[65758]: DEBUG nova.compute.manager [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 893.597463] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.628815] env[65758]: DEBUG nova.compute.manager [req-cdb28da1-b4a6-447f-b531-b2e43f145072 req-0c17280e-586b-417a-9ffc-2688c8dcefa5 service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Received event network-vif-plugged-25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 893.628964] env[65758]: DEBUG oslo_concurrency.lockutils [req-cdb28da1-b4a6-447f-b531-b2e43f145072 req-0c17280e-586b-417a-9ffc-2688c8dcefa5 service nova] Acquiring lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.629185] env[65758]: DEBUG oslo_concurrency.lockutils [req-cdb28da1-b4a6-447f-b531-b2e43f145072 req-0c17280e-586b-417a-9ffc-2688c8dcefa5 service nova] Lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.629339] env[65758]: DEBUG oslo_concurrency.lockutils [req-cdb28da1-b4a6-447f-b531-b2e43f145072 req-0c17280e-586b-417a-9ffc-2688c8dcefa5 service nova] Lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.629501] env[65758]: DEBUG nova.compute.manager [req-cdb28da1-b4a6-447f-b531-b2e43f145072 req-0c17280e-586b-417a-9ffc-2688c8dcefa5 service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] No waiting events found dispatching network-vif-plugged-25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 893.629658] env[65758]: WARNING nova.compute.manager [req-cdb28da1-b4a6-447f-b531-b2e43f145072 req-0c17280e-586b-417a-9ffc-2688c8dcefa5 service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Received unexpected event network-vif-plugged-25549e11-fab5-4462-b69b-5fa3581f6d34 for instance with vm_state building and task_state spawning. [ 893.692129] env[65758]: DEBUG nova.compute.manager [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 893.692351] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 893.693529] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36feeb61-7dbf-49f4-a944-0b412ab82612 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.711599] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.712664] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6623016b-e39d-486d-afec-9f7f52a8c9d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.722294] env[65758]: DEBUG oslo_vmware.api [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 893.722294] env[65758]: value = "task-4660649" [ 893.722294] env[65758]: _type = "Task" [ 893.722294] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.733810] env[65758]: DEBUG oslo_vmware.api [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660649, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.775777] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.776069] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquired lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.776302] env[65758]: DEBUG nova.network.neutron [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 893.942374] env[65758]: DEBUG oslo_concurrency.lockutils [None req-65072563-731c-4473-b2cc-4031c545d852 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.976s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.079856] env[65758]: DEBUG nova.network.neutron [-] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 894.094860] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660648, 'name': ReconfigVM_Task, 'duration_secs': 0.699149} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.095864] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a/105c53ce-e657-4a29-bc7f-96b4f885707a.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.096753] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f025b45e-a58f-4ae4-8980-a2379d271518 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.120838] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f8b828-bc5c-46ef-bb6a-3fed83a5f53a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.144952] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d16e0d-a2fe-4e34-8885-ae575a1c1c7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.170823] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9d70ec-c388-4896-abac-8b6f8f17fbc9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.180023] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.180023] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13ab6304-7e6b-4cc8-b671-ea6127a1d728 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.189132] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 894.189132] env[65758]: value = "task-4660650" [ 894.189132] env[65758]: _type = "Task" [ 894.189132] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.198616] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660650, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.201744] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d001ce00-512b-4add-bdac-6ec7f1edba1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.213525] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a1a01c-0313-44ac-94d1-9bef89a4e88f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.253020] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c161e3a-1746-4747-929e-36bdd22acb92 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.265123] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92cc67b-b7de-4287-9fa3-2aefbe9dba52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.269458] env[65758]: DEBUG oslo_vmware.api [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660649, 'name': PowerOffVM_Task, 'duration_secs': 0.264062} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.269752] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 894.269921] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 894.270654] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ba2d1b7-9ac0-4433-924a-3a648f8e19ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.283919] env[65758]: WARNING openstack [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 894.284421] env[65758]: WARNING openstack [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] 
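The recurring "Disabling service 'block-storage'" / "Disabling service 'key-manager'" warnings above are openstacksdk reporting an oslo_config.cfg.NoSuchOptError: it looks for valid_interfaces in the [cinder] and [barbican] groups of the Nova config, the option is not registered there, so the SDK disables that service client and carries on rather than failing. A minimal sketch of how that error arises with oslo.config (the group and option registration below are illustrative, not Nova's actual setup):

from oslo_config import cfg

conf = cfg.ConfigOpts()
# Register a [cinder] group, but deliberately no 'valid_interfaces' option in it.
conf.register_group(cfg.OptGroup('cinder'))
conf.register_opts([cfg.StrOpt('catalog_info')], group='cinder')
conf([], project='demo')

try:
    # Reading an option that was never registered in the group raises
    # NoSuchOptError, the exception quoted in the warnings above.
    print(conf.cinder.valid_interfaces)
except cfg.NoSuchOptError as exc:
    print('caught:', exc)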
[ 894.292510] env[65758]: DEBUG nova.compute.provider_tree [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.367539] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 894.367762] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 894.367962] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Deleting the datastore file [datastore1] 31816c0c-d7d2-48db-9a87-a1e03c938a60 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 894.368265] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8721075-1d68-4a1c-aec2-d81c1e82483d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.375367] env[65758]: DEBUG oslo_vmware.api [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 894.375367] env[65758]: value = "task-4660652" [ 894.375367] env[65758]: _type = "Task" [ 894.375367] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.386345] env[65758]: DEBUG oslo_vmware.api [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660652, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.510297] env[65758]: DEBUG nova.network.neutron [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 894.537724] env[65758]: DEBUG nova.compute.manager [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 894.566367] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 894.566915] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 894.566915] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 894.567061] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 894.567131] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 894.567281] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 894.567529] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 894.567687] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 894.567849] 
env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 894.568025] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 894.568193] env[65758]: DEBUG nova.virt.hardware [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 894.569147] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9158b643-c5c0-4aab-aac8-b82094a7e042 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.578962] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f5eebf-64f1-4d80-9bf3-2b0d3ecf307e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.595698] env[65758]: INFO nova.compute.manager [-] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Took 1.38 seconds to deallocate network for instance. [ 894.688900] env[65758]: WARNING neutronclient.v2_0.client [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 894.689911] env[65758]: WARNING openstack [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 894.691028] env[65758]: WARNING openstack [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 894.709221] env[65758]: DEBUG oslo_vmware.api [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660650, 'name': PowerOnVM_Task, 'duration_secs': 0.462391} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.709566] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.796855] env[65758]: DEBUG nova.scheduler.client.report [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.811495] env[65758]: DEBUG nova.network.neutron [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Updating instance_info_cache with network_info: [{"id": "25549e11-fab5-4462-b69b-5fa3581f6d34", "address": "fa:16:3e:8d:f4:ce", "network": {"id": "3770aad6-39a0-41da-84d1-b6aa69c0dfad", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-982589002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45aad313d10447e9ba61ed0a05b915ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25549e11-fa", "ovs_interfaceid": "25549e11-fab5-4462-b69b-5fa3581f6d34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 894.886497] env[65758]: DEBUG oslo_vmware.api [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660652, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.360927} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.886767] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.886948] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.887144] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.887322] env[65758]: INFO nova.compute.manager [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Took 1.20 seconds to destroy the instance on the hypervisor. [ 894.887640] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 894.887825] env[65758]: DEBUG nova.compute.manager [-] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 894.887926] env[65758]: DEBUG nova.network.neutron [-] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 894.888205] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 894.890964] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 894.890964] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 895.007018] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
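The neutronclient deprecation warning that keeps reappearing ("deprecated in favor of OpenstackSDK") points to openstacksdk's network proxy as the replacement. A minimal sketch of equivalent port queries with openstacksdk, assuming a clouds.yaml entry named 'devstack'; the UUIDs are ones that happen to appear in this log and are used only for illustration:

import openstack

# Connect using a clouds.yaml entry (the name is an assumption for this sketch).
conn = openstack.connect(cloud='devstack')

# Look up one port by UUID, roughly what the port-update paths above do.
port = conn.network.get_port('25549e11-fab5-4462-b69b-5fa3581f6d34')
print(port.id, port.mac_address, port.fixed_ips)

# List the ports bound to an instance, as deallocate_for_instance() must.
for p in conn.network.ports(device_id='31816c0c-d7d2-48db-9a87-a1e03c938a60'):
    print(p.id, p.status)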
[ 895.103216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.167531] env[65758]: DEBUG nova.network.neutron [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Successfully updated port: cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 895.303230] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.794s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.303823] env[65758]: DEBUG nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 895.308080] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.010s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.308294] env[65758]: DEBUG nova.objects.instance [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lazy-loading 'resources' on Instance uuid 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.315764] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Releasing lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.316172] env[65758]: DEBUG nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Instance network_info: |[{"id": "25549e11-fab5-4462-b69b-5fa3581f6d34", "address": "fa:16:3e:8d:f4:ce", "network": {"id": "3770aad6-39a0-41da-84d1-b6aa69c0dfad", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-982589002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45aad313d10447e9ba61ed0a05b915ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25549e11-fa", "ovs_interfaceid": "25549e11-fab5-4462-b69b-5fa3581f6d34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 895.317274] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:f4:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ada35c98-01a9-4352-98e4-1d20ba31f928', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25549e11-fab5-4462-b69b-5fa3581f6d34', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.325961] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 895.326657] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 895.328090] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-217112b7-a2ea-496b-873c-795d6c73fbb1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.354615] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.354615] env[65758]: value = "task-4660653" [ 895.354615] env[65758]: _type = "Task" [ 895.354615] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.364420] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660653, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.501628] env[65758]: DEBUG nova.compute.manager [req-7c05745e-9887-4fc2-9573-b8a67b5c08aa req-2eae2715-304d-4cae-a76c-b8aa3f585453 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received event network-vif-plugged-cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 895.501846] env[65758]: DEBUG oslo_concurrency.lockutils [req-7c05745e-9887-4fc2-9573-b8a67b5c08aa req-2eae2715-304d-4cae-a76c-b8aa3f585453 service nova] Acquiring lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.502262] env[65758]: DEBUG oslo_concurrency.lockutils [req-7c05745e-9887-4fc2-9573-b8a67b5c08aa req-2eae2715-304d-4cae-a76c-b8aa3f585453 service nova] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.502262] env[65758]: DEBUG oslo_concurrency.lockutils [req-7c05745e-9887-4fc2-9573-b8a67b5c08aa req-2eae2715-304d-4cae-a76c-b8aa3f585453 service nova] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.502396] env[65758]: DEBUG nova.compute.manager [req-7c05745e-9887-4fc2-9573-b8a67b5c08aa req-2eae2715-304d-4cae-a76c-b8aa3f585453 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] No waiting events found dispatching network-vif-plugged-cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 895.502565] env[65758]: WARNING nova.compute.manager [req-7c05745e-9887-4fc2-9573-b8a67b5c08aa req-2eae2715-304d-4cae-a76c-b8aa3f585453 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received unexpected event network-vif-plugged-cdcc66de-e599-4e26-8757-617493c55e00 for instance with vm_state building and task_state spawning. [ 895.665614] env[65758]: DEBUG nova.compute.manager [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Received event network-changed-25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 895.665902] env[65758]: DEBUG nova.compute.manager [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Refreshing instance network info cache due to event network-changed-25549e11-fab5-4462-b69b-5fa3581f6d34. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 895.666427] env[65758]: DEBUG oslo_concurrency.lockutils [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Acquiring lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.666427] env[65758]: DEBUG oslo_concurrency.lockutils [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Acquired lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.666427] env[65758]: DEBUG nova.network.neutron [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Refreshing network info cache for port 25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 895.673224] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.673224] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.673224] env[65758]: DEBUG nova.network.neutron [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 895.724492] env[65758]: INFO nova.compute.manager [None req-607d7ad5-5ce5-40e2-828a-f12b5fffff8a tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance to original state: 'active' [ 895.756870] env[65758]: DEBUG nova.network.neutron [-] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 895.812841] env[65758]: DEBUG nova.compute.utils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 895.817558] env[65758]: DEBUG nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 895.817776] env[65758]: DEBUG nova.network.neutron [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 895.818123] env[65758]: WARNING neutronclient.v2_0.client [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 895.818437] env[65758]: WARNING neutronclient.v2_0.client [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 895.819042] env[65758]: WARNING openstack [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 895.819415] env[65758]: WARNING openstack [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 895.869273] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660653, 'name': CreateVM_Task, 'duration_secs': 0.4093} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.872318] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 895.874120] env[65758]: WARNING neutronclient.v2_0.client [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
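The task entries above and below (CreateVM_Task, SearchDatastore_Task, the "progress is N%" poll lines) come from oslo.vmware's wait_for_task helper, which polls a vCenter task object until it reaches 'success' or raises on error. A minimal sketch of that pattern, with a placeholder vCenter address and credentials:

from oslo_vmware import api

# Placeholder endpoint and credentials; the poll interval mirrors the DEBUG cadence above.
session = api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_off(vm_ref):
    # invoke_api() issues the SOAP call; vSphere methods ending in _Task return
    # a Task managed object, and wait_for_task() polls it (the _poll_task lines
    # in this log) until the state is 'success', raising if the task errors out.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)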
[ 895.874191] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.874723] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.875439] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 895.875845] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8fdc399-2483-4ff9-acdf-48df37c91cb5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.881846] env[65758]: DEBUG nova.policy [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd63bda0326124f8eb9ee6d515a6a7320', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '111dc87614bb42e2bc66ae1bfb092795', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 895.890246] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 895.890246] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52230b00-ea4a-bd53-405f-ba3b9a22f0a7" [ 895.890246] env[65758]: _type = "Task" [ 895.890246] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.898332] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52230b00-ea4a-bd53-405f-ba3b9a22f0a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.967039] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Acquiring lock "d42d0818-1486-4696-9871-2cf989aeb885" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.967350] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "d42d0818-1486-4696-9871-2cf989aeb885" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.967561] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Acquiring lock "d42d0818-1486-4696-9871-2cf989aeb885-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.967738] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "d42d0818-1486-4696-9871-2cf989aeb885-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.967902] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "d42d0818-1486-4696-9871-2cf989aeb885-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.970290] env[65758]: INFO nova.compute.manager [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Terminating instance [ 896.168698] env[65758]: WARNING neutronclient.v2_0.client [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
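The Acquiring/acquired/released lock lines throughout this section come from oslo.concurrency's lockutils, which logs how long each caller waited for and then held a named lock. A minimal sketch of the two spellings of that pattern (the lock names below are illustrative):

from oslo_concurrency import lockutils

# Decorator form, comparable to the 'compute_resources' lock in the log; the
# wrapper logs waited/held durations at DEBUG exactly like the lines above.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def update_usage():
    pass  # runs with the in-process lock held

# Context-manager form, comparable to the per-instance '<uuid>-events' locks.
def clear_events(instance_uuid):
    with lockutils.lock(f'{instance_uuid}-events'):
        pass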
[ 896.169719] env[65758]: WARNING openstack [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 896.169894] env[65758]: WARNING openstack [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 896.182588] env[65758]: WARNING openstack [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 896.183269] env[65758]: WARNING openstack [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 896.238491] env[65758]: DEBUG nova.network.neutron [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Successfully created port: d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 896.254706] env[65758]: DEBUG nova.network.neutron [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 896.259648] env[65758]: INFO nova.compute.manager [-] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Took 1.37 seconds to deallocate network for instance. [ 896.320153] env[65758]: DEBUG nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 896.382854] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea75540-d052-4004-88d6-1472f37d3a37 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.395945] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6834eacd-629e-484b-b137-affa68caf7e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.405288] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52230b00-ea4a-bd53-405f-ba3b9a22f0a7, 'name': SearchDatastore_Task, 'duration_secs': 0.028953} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.406353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.406640] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.406899] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.407043] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.407224] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.407487] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30f9d4b3-31fe-4d70-a7cd-1e28316464a8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.438401] env[65758]: WARNING 
neutronclient.v2_0.client [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 896.439080] env[65758]: WARNING openstack [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 896.439467] env[65758]: WARNING openstack [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 896.447837] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d71aa35-2f7d-43cc-bd01-9e2edc5010b4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.457671] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af1d212-bb65-4719-87f6-f97af3e7bb88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.461663] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.461869] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 896.463652] env[65758]: WARNING neutronclient.v2_0.client [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
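The network_info blobs written when the instance cache is updated (and refreshed again just below for port 25549e11-fab5-4462-b69b-5fa3581f6d34) are plain lists of VIF dicts, so the addresses and OVS details can be read out directly. A short sketch over an abbreviated copy of the structure as it appears in this log:

# Abbreviated from the "Updating instance_info_cache with network_info" entries above.
network_info = [{
    "id": "cdcc66de-e599-4e26-8757-617493c55e00",
    "address": "fa:16:3e:6f:f2:e7",
    "devname": "tapcdcc66de-e5",
    "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.12"}]}]},
}]

for vif in network_info:
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], ips)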
[ 896.464302] env[65758]: WARNING openstack [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 896.464646] env[65758]: WARNING openstack [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 896.471841] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7432088f-b061-4411-abd6-951a19cd7c88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.477018] env[65758]: DEBUG nova.compute.manager [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 896.477245] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 896.478103] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0e73064-b957-4527-aeb2-2e91401b9f99 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.488777] env[65758]: DEBUG nova.compute.provider_tree [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.490051] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 896.490051] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52547966-9ce3-71b1-2976-0265ae305c7a" [ 896.490051] env[65758]: _type = "Task" [ 896.490051] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.497370] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 896.497370] env[65758]: value = "task-4660654" [ 896.497370] env[65758]: _type = "Task" [ 896.497370] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.501819] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52547966-9ce3-71b1-2976-0265ae305c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.515771] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660654, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.655038] env[65758]: DEBUG nova.network.neutron [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [{"id": "cdcc66de-e599-4e26-8757-617493c55e00", "address": "fa:16:3e:6f:f2:e7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcc66de-e5", "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 896.694242] env[65758]: DEBUG nova.network.neutron [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Updated VIF entry in instance network info cache for port 25549e11-fab5-4462-b69b-5fa3581f6d34. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 896.694624] env[65758]: DEBUG nova.network.neutron [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Updating instance_info_cache with network_info: [{"id": "25549e11-fab5-4462-b69b-5fa3581f6d34", "address": "fa:16:3e:8d:f4:ce", "network": {"id": "3770aad6-39a0-41da-84d1-b6aa69c0dfad", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-982589002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45aad313d10447e9ba61ed0a05b915ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25549e11-fa", "ovs_interfaceid": "25549e11-fab5-4462-b69b-5fa3581f6d34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 896.768078] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.991797] env[65758]: DEBUG nova.scheduler.client.report [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 897.009463] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52547966-9ce3-71b1-2976-0265ae305c7a, 'name': SearchDatastore_Task, 'duration_secs': 0.021685} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.010896] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-641ff21f-4a62-4b17-91eb-a93fc51785c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.019265] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660654, 'name': PowerOffVM_Task, 'duration_secs': 0.253983} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.020132] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.020296] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Volume detach. Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 897.020534] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909892', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'name': 'volume-21f94ac1-a7a7-4e71-865b-3193eae1848e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd42d0818-1486-4696-9871-2cf989aeb885', 'attached_at': '', 'detached_at': '', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'serial': '21f94ac1-a7a7-4e71-865b-3193eae1848e'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 897.021482] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561c3ead-8458-4cd9-a156-51a0bae7359c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.027236] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 897.027236] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5247a259-2597-bfd1-88a6-5b65c3ba7a60" [ 897.027236] env[65758]: _type = "Task" [ 897.027236] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.049452] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fe8595-e9d4-4fdb-b769-b74c93cbc211 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.059743] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5247a259-2597-bfd1-88a6-5b65c3ba7a60, 'name': SearchDatastore_Task, 'duration_secs': 0.016059} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.060358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.060514] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] e93528eb-33d0-46d1-94e8-d1d66f2c682f/e93528eb-33d0-46d1-94e8-d1d66f2c682f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 897.060800] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a73cb31b-1777-4094-97f4-8c9e5195520a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.066018] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c63d31-d10d-4eb8-a473-adf4087f8a86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.070264] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 897.070264] env[65758]: value = "task-4660655" [ 897.070264] env[65758]: _type = "Task" [ 897.070264] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.090204] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506b3782-5d40-4c1b-a913-8135721cadf2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.096649] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660655, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.110675] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] The volume has not been displaced from its original location: [datastore1] volume-21f94ac1-a7a7-4e71-865b-3193eae1848e/volume-21f94ac1-a7a7-4e71-865b-3193eae1848e.vmdk. No consolidation needed. {{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 897.116302] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Reconfiguring VM instance instance-0000003c to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 897.116771] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56c99c23-faf8-4a9a-bf7d-acd060ec423b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.136754] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 897.136754] env[65758]: value = "task-4660656" [ 897.136754] env[65758]: _type = "Task" [ 897.136754] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.151305] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660656, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.158034] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.158034] env[65758]: DEBUG nova.compute.manager [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Instance network_info: |[{"id": "cdcc66de-e599-4e26-8757-617493c55e00", "address": "fa:16:3e:6f:f2:e7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcc66de-e5", "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 897.158529] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:f2:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdcc66de-e599-4e26-8757-617493c55e00', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.168234] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Creating folder: Project (64ffccae76ed401582dd915ae5f87922). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 897.169114] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ed02ee6-e017-423e-b490-6771447a681d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.185017] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Created folder: Project (64ffccae76ed401582dd915ae5f87922) in parent group-v909763. [ 897.185017] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Creating folder: Instances. Parent ref: group-v909951. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 897.185258] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bf7b79b-36a5-4389-bff4-9a5fd926d81c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.197887] env[65758]: DEBUG oslo_concurrency.lockutils [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Releasing lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.198195] env[65758]: DEBUG nova.compute.manager [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Received event network-vif-deleted-6feb5c74-32da-4591-acdf-492fb5e3aebb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 897.198433] env[65758]: DEBUG nova.compute.manager [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Received event network-changed-f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 897.198613] env[65758]: DEBUG nova.compute.manager [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Refreshing instance network info cache due to event network-changed-f597ff54-9371-4703-893c-3b7ad96d394d. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 897.198820] env[65758]: DEBUG oslo_concurrency.lockutils [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Acquiring lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.198955] env[65758]: DEBUG oslo_concurrency.lockutils [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Acquired lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.199167] env[65758]: DEBUG nova.network.neutron [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Refreshing network info cache for port f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 897.200473] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Created folder: Instances in parent group-v909951. [ 897.200727] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 897.200984] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.201266] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45f9481e-4e99-4220-8171-ebb783efb4ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.225518] env[65758]: WARNING neutronclient.v2_0.client [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 897.225518] env[65758]: WARNING openstack [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 897.225818] env[65758]: WARNING openstack [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 897.239940] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.239940] env[65758]: value = "task-4660659" [ 897.239940] env[65758]: _type = "Task" [ 897.239940] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.250887] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660659, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.333327] env[65758]: DEBUG nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 897.369702] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 897.369955] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 897.370123] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 897.370302] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 
tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 897.371126] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 897.371126] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 897.371126] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 897.371126] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 897.371126] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 897.371426] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 897.371458] env[65758]: DEBUG nova.virt.hardware [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 897.372366] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b198b40b-ee36-429b-8f2b-b0c2cd6172fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.382072] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31113b6c-6df8-43fd-811d-b7fe483a2c7f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.503974] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.196s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.506807] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.346s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.508609] env[65758]: INFO nova.compute.claims [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.541025] env[65758]: INFO nova.scheduler.client.report [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted allocations for instance 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca [ 897.585082] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660655, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.624190] env[65758]: WARNING neutronclient.v2_0.client [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 897.624899] env[65758]: WARNING openstack [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 897.625279] env[65758]: WARNING openstack [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 897.650346] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660656, 'name': ReconfigVM_Task, 'duration_secs': 0.205736} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.650346] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Reconfigured VM instance instance-0000003c to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 897.654792] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68a64687-03c8-4937-9da2-00fa1b4d086d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.672739] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 897.672739] env[65758]: value = "task-4660660" [ 897.672739] env[65758]: _type = "Task" [ 897.672739] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.683870] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660660, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.754124] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660659, 'name': CreateVM_Task, 'duration_secs': 0.406159} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.754124] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.754124] env[65758]: WARNING neutronclient.v2_0.client [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 897.754124] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.755025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.755025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 897.755155] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-992652ba-9d95-4073-82de-17df8539a1ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.763349] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 897.763349] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a7f1ee-c8bd-6143-4395-e0090c5e715f" [ 897.763349] env[65758]: _type = "Task" [ 897.763349] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.769444] env[65758]: DEBUG nova.network.neutron [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updated VIF entry in instance network info cache for port f597ff54-9371-4703-893c-3b7ad96d394d. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 897.769901] env[65758]: DEBUG nova.network.neutron [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updating instance_info_cache with network_info: [{"id": "f597ff54-9371-4703-893c-3b7ad96d394d", "address": "fa:16:3e:cc:36:a7", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf597ff54-93", "ovs_interfaceid": "f597ff54-9371-4703-893c-3b7ad96d394d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 897.781393] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a7f1ee-c8bd-6143-4395-e0090c5e715f, 'name': SearchDatastore_Task, 'duration_secs': 0.010791} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.781393] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.781393] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.781690] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.782130] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.782674] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.783246] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cedd59ec-2263-43c5-aafb-b75abbd10c5c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.794811] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.794998] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.796467] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c5e4a5b-59b9-4b96-a0b1-cff11754353c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.804273] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 897.804273] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520cfe87-52ce-41c4-5551-9807b72c73d7" [ 897.804273] env[65758]: _type = "Task" [ 897.804273] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.810813] env[65758]: DEBUG nova.compute.manager [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received event network-changed-cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 897.811043] env[65758]: DEBUG nova.compute.manager [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Refreshing instance network info cache due to event network-changed-cdcc66de-e599-4e26-8757-617493c55e00. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 897.811298] env[65758]: DEBUG oslo_concurrency.lockutils [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] Acquiring lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.811435] env[65758]: DEBUG oslo_concurrency.lockutils [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] Acquired lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.811607] env[65758]: DEBUG nova.network.neutron [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Refreshing network info cache for port cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 897.819749] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520cfe87-52ce-41c4-5551-9807b72c73d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.918099] env[65758]: DEBUG nova.network.neutron [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Successfully updated port: d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 898.061021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7942cf23-b62f-440d-aaea-3821b393f1a4 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "3049c522-d3bc-4ccf-93bd-0d1efe41d1ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.302s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.084292] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660655, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.692826} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.084516] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] e93528eb-33d0-46d1-94e8-d1d66f2c682f/e93528eb-33d0-46d1-94e8-d1d66f2c682f.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 898.084734] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 898.085035] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1c3b584-c079-4ce9-8716-bd3561375866 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.093945] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 898.093945] env[65758]: value = "task-4660661" [ 898.093945] env[65758]: _type = "Task" [ 898.093945] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.103884] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660661, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.186430] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660660, 'name': ReconfigVM_Task, 'duration_secs': 0.180082} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.186740] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909892', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'name': 'volume-21f94ac1-a7a7-4e71-865b-3193eae1848e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd42d0818-1486-4696-9871-2cf989aeb885', 'attached_at': '', 'detached_at': '', 'volume_id': '21f94ac1-a7a7-4e71-865b-3193eae1848e', 'serial': '21f94ac1-a7a7-4e71-865b-3193eae1848e'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 898.187203] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 898.188021] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb16673-dcb5-482b-964b-e914972da435 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.196089] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 898.196341] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ba7512c-4fb0-4b48-bed3-e91b73d4bbc8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.258768] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 898.259220] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 898.259492] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 
tempest-ServersTestBootFromVolume-151696442-project-member] Deleting the datastore file [datastore1] d42d0818-1486-4696-9871-2cf989aeb885 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.259888] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8709791-26f0-4f9f-b8c0-3407817530b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.271352] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for the task: (returnval){ [ 898.271352] env[65758]: value = "task-4660663" [ 898.271352] env[65758]: _type = "Task" [ 898.271352] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.276078] env[65758]: DEBUG oslo_concurrency.lockutils [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] Releasing lock "refresh_cache-df46c28d-7cbd-490e-8db2-9730e4d9f953" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.276435] env[65758]: DEBUG nova.compute.manager [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Received event network-vif-deleted-328056a5-b991-4a04-8444-c1de0afdf0ab {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 898.276629] env[65758]: INFO nova.compute.manager [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Neutron deleted interface 328056a5-b991-4a04-8444-c1de0afdf0ab; detaching it from the instance and deleting it from the info cache [ 898.276690] env[65758]: DEBUG nova.network.neutron [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 898.284662] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660663, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.315777] env[65758]: WARNING neutronclient.v2_0.client [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 898.316454] env[65758]: WARNING openstack [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 898.316832] env[65758]: WARNING openstack [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 898.323973] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520cfe87-52ce-41c4-5551-9807b72c73d7, 'name': SearchDatastore_Task, 'duration_secs': 0.016857} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.325486] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e90130e-0501-4868-9023-0ee233fd428b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.332251] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 898.332251] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526a90bf-0e2c-3205-1362-9b0c4d5af402" [ 898.332251] env[65758]: _type = "Task" [ 898.332251] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.341246] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526a90bf-0e2c-3205-1362-9b0c4d5af402, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.423667] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.423869] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.424116] env[65758]: DEBUG nova.network.neutron [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 898.487318] env[65758]: WARNING neutronclient.v2_0.client [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 898.487988] env[65758]: WARNING openstack [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 898.488360] env[65758]: WARNING openstack [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 898.562324] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "105c53ce-e657-4a29-bc7f-96b4f885707a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.562494] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.562702] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.562922] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.563165] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.566053] env[65758]: INFO nova.compute.manager [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Terminating instance [ 898.602233] env[65758]: DEBUG nova.network.neutron [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updated VIF entry in instance network info cache for port cdcc66de-e599-4e26-8757-617493c55e00. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 898.602579] env[65758]: DEBUG nova.network.neutron [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [{"id": "cdcc66de-e599-4e26-8757-617493c55e00", "address": "fa:16:3e:6f:f2:e7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcc66de-e5", "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 898.610653] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660661, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.070116} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.611521] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 898.611750] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceffa229-4f6e-452f-a9e5-0e7f970e614e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.638539] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] e93528eb-33d0-46d1-94e8-d1d66f2c682f/e93528eb-33d0-46d1-94e8-d1d66f2c682f.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.641910] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15b141e4-9d31-4c88-8d1e-fb75527be1ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.662997] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 898.662997] env[65758]: value = "task-4660664" [ 898.662997] env[65758]: _type = "Task" [ 898.662997] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.674788] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660664, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.781391] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d385b127-ef6a-48f2-a2ac-58d209a3f803 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.787565] env[65758]: DEBUG oslo_vmware.api [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Task: {'id': task-4660663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.499473} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.788813] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 898.788813] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 898.788813] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 898.788813] env[65758]: INFO nova.compute.manager [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Took 2.31 seconds to destroy the instance on the hypervisor. [ 898.788958] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 898.789163] env[65758]: DEBUG nova.compute.manager [-] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 898.789250] env[65758]: DEBUG nova.network.neutron [-] [instance: d42d0818-1486-4696-9871-2cf989aeb885] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 898.789578] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 898.790374] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 898.790374] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 898.804102] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e4c9d0-0271-4a58-94f5-8f043680694e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.846678] env[65758]: DEBUG nova.compute.manager [req-b2c98f7b-73a9-42be-bf35-d60685601fb6 req-612fc858-92c6-4021-bfb9-f9caf684c1dd service nova] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Detach interface failed, port_id=328056a5-b991-4a04-8444-c1de0afdf0ab, reason: Instance 31816c0c-d7d2-48db-9a87-a1e03c938a60 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 898.856817] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526a90bf-0e2c-3205-1362-9b0c4d5af402, 'name': SearchDatastore_Task, 'duration_secs': 0.041284} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.859704] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.860040] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] ba3153f2-8e6f-469c-8730-957c5eebe97b/ba3153f2-8e6f-469c-8730-957c5eebe97b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.861032] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a47a0f7-c738-49c1-8aa8-2cf3abb50fa0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.868800] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 898.868800] env[65758]: value = "task-4660665" [ 898.868800] env[65758]: _type = "Task" [ 898.868800] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.874897] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "2d787237-26e5-4519-9f6e-1d30b9d016cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.875144] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "2d787237-26e5-4519-9f6e-1d30b9d016cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.875344] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "2d787237-26e5-4519-9f6e-1d30b9d016cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.875520] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "2d787237-26e5-4519-9f6e-1d30b9d016cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.875680] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "2d787237-26e5-4519-9f6e-1d30b9d016cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.877943] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 898.880547] env[65758]: INFO nova.compute.manager [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Terminating instance [ 898.885469] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660665, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.927313] env[65758]: WARNING openstack [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 898.927724] env[65758]: WARNING openstack [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 898.967686] env[65758]: DEBUG nova.network.neutron [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 899.059875] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30da547c-cab4-4472-9736-0f8ce8933878 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.064886] env[65758]: WARNING neutronclient.v2_0.client [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 899.065598] env[65758]: WARNING openstack [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 899.065971] env[65758]: WARNING openstack [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 899.073735] env[65758]: DEBUG nova.compute.manager [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 899.073945] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.077218] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e756f8d-51db-4762-9c4c-6a847fa3b6f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.080741] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f885eb4-1375-4a09-8513-d17b8b4a8d9d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.089327] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.116524] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1067eb1-3bee-4904-ba46-0617ae63761b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.118625] env[65758]: DEBUG oslo_concurrency.lockutils [req-b9214864-a452-49ed-a6eb-f0a6ce523f9b req-854b3408-4c20-4065-8786-21da8079a720 service nova] Releasing lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.119727] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad0393a-6a77-47d0-85a8-2dc1fd4193cf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.130346] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab701e8-da0f-4c3b-b936-8b5d5b514c9e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.135073] env[65758]: DEBUG oslo_vmware.api [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 899.135073] env[65758]: value = "task-4660666" [ 899.135073] env[65758]: _type = "Task" [ 899.135073] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.151529] env[65758]: DEBUG nova.compute.provider_tree [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.160556] env[65758]: DEBUG oslo_vmware.api [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660666, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.176152] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660664, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.264741] env[65758]: DEBUG nova.network.neutron [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updating instance_info_cache with network_info: [{"id": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "address": "fa:16:3e:08:ef:77", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93558c3-4a", "ovs_interfaceid": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 899.379433] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660665, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.388975] env[65758]: DEBUG nova.compute.manager [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 899.389242] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.390195] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b38f4e2-d6f1-4183-9a89-c93b05723281 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.401316] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.402178] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9c52c47-0178-4f0a-b68e-b532a304aaec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.411215] env[65758]: DEBUG oslo_vmware.api [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 899.411215] env[65758]: value = "task-4660667" [ 899.411215] env[65758]: _type = "Task" [ 899.411215] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.421851] env[65758]: DEBUG oslo_vmware.api [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.543758] env[65758]: DEBUG nova.compute.manager [req-b4a3e520-ffc7-437e-a35f-f9f87c363aac req-0d2b6e88-89ed-48ff-945d-b51cc484dea8 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Received event network-vif-deleted-bb884939-9aaf-474f-9246-eb279d11aa4e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 899.543758] env[65758]: INFO nova.compute.manager [req-b4a3e520-ffc7-437e-a35f-f9f87c363aac req-0d2b6e88-89ed-48ff-945d-b51cc484dea8 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Neutron deleted interface bb884939-9aaf-474f-9246-eb279d11aa4e; detaching it from the instance and deleting it from the info cache [ 899.543758] env[65758]: DEBUG nova.network.neutron [req-b4a3e520-ffc7-437e-a35f-f9f87c363aac req-0d2b6e88-89ed-48ff-945d-b51cc484dea8 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 899.647047] env[65758]: DEBUG oslo_vmware.api [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660666, 'name': PowerOffVM_Task, 'duration_secs': 0.233784} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.647358] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.647529] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.647793] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca98f284-9778-4494-879c-803cfc39bc32 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.654598] env[65758]: DEBUG nova.scheduler.client.report [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 899.673947] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660664, 'name': ReconfigVM_Task, 'duration_secs': 0.702933} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.674331] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Reconfigured VM instance instance-00000046 to attach disk [datastore1] e93528eb-33d0-46d1-94e8-d1d66f2c682f/e93528eb-33d0-46d1-94e8-d1d66f2c682f.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.674660] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=65758) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 899.676322] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-c2269c7e-012f-492b-bc42-ff638b483221 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.684277] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 899.684277] env[65758]: value = "task-4660669" [ 899.684277] env[65758]: _type = "Task" [ 899.684277] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.695107] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660669, 'name': CreateVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.725186] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 899.725453] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 899.725637] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleting the datastore file [datastore1] 105c53ce-e657-4a29-bc7f-96b4f885707a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 899.725944] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-401e74df-ee90-44d3-8ebf-1b06244c5b3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.734463] env[65758]: DEBUG oslo_vmware.api [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 899.734463] env[65758]: value = "task-4660670" [ 899.734463] env[65758]: _type = "Task" [ 899.734463] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.749672] env[65758]: DEBUG oslo_vmware.api [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660670, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.767712] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Releasing lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.768070] env[65758]: DEBUG nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Instance network_info: |[{"id": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "address": "fa:16:3e:08:ef:77", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93558c3-4a", "ovs_interfaceid": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 899.768686] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:ef:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd93558c3-4a75-4e02-98d6-de4d1cf9dee2', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.778084] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 899.778363] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.778601] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53a59525-5486-4528-832a-295b8efe7d23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.799411] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.799411] env[65758]: value = "task-4660671" [ 899.799411] env[65758]: _type = "Task" [ 899.799411] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.808638] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660671, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.844146] env[65758]: DEBUG nova.compute.manager [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Received event network-vif-plugged-d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 899.844386] env[65758]: DEBUG oslo_concurrency.lockutils [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Acquiring lock "875cbc88-f817-4ea8-a969-b97e875918d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.844659] env[65758]: DEBUG oslo_concurrency.lockutils [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Lock "875cbc88-f817-4ea8-a969-b97e875918d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.844797] env[65758]: DEBUG oslo_concurrency.lockutils [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Lock "875cbc88-f817-4ea8-a969-b97e875918d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.844984] env[65758]: DEBUG nova.compute.manager [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] No waiting events found dispatching network-vif-plugged-d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 899.845312] env[65758]: WARNING nova.compute.manager [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Received unexpected event network-vif-plugged-d93558c3-4a75-4e02-98d6-de4d1cf9dee2 for instance with vm_state building and task_state spawning. 
[ 899.845526] env[65758]: DEBUG nova.compute.manager [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Received event network-changed-d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 899.845705] env[65758]: DEBUG nova.compute.manager [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Refreshing instance network info cache due to event network-changed-d93558c3-4a75-4e02-98d6-de4d1cf9dee2. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 899.846029] env[65758]: DEBUG oslo_concurrency.lockutils [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Acquiring lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.846237] env[65758]: DEBUG oslo_concurrency.lockutils [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Acquired lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.846447] env[65758]: DEBUG nova.network.neutron [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Refreshing network info cache for port d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 899.870351] env[65758]: DEBUG nova.network.neutron [-] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 899.884348] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765889} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.885249] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] ba3153f2-8e6f-469c-8730-957c5eebe97b/ba3153f2-8e6f-469c-8730-957c5eebe97b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.885472] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.885743] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7840c41-3048-42fb-b0bb-bd42dfb6ea3e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.895625] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 899.895625] env[65758]: value = "task-4660672" [ 899.895625] env[65758]: _type = "Task" [ 899.895625] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.906665] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660672, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.922040] env[65758]: DEBUG oslo_vmware.api [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660667, 'name': PowerOffVM_Task, 'duration_secs': 0.250988} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.922259] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.922399] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.923072] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87cf442c-c2db-426e-b36d-a5be9f0b78c7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.014925] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 900.015376] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 900.015705] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleting the datastore file [datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.016175] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ff823b7-0ce2-4d9b-a854-f95572e1691e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.025157] env[65758]: DEBUG oslo_vmware.api [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for the task: (returnval){ [ 900.025157] env[65758]: value = "task-4660674" [ 900.025157] env[65758]: _type = "Task" [ 900.025157] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.035601] env[65758]: DEBUG oslo_vmware.api [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660674, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.046437] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa630712-b89b-4197-b4b8-185a0139778f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.058233] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1d7bec-d90b-405a-a968-3e8f61be300a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.093543] env[65758]: DEBUG nova.compute.manager [req-b4a3e520-ffc7-437e-a35f-f9f87c363aac req-0d2b6e88-89ed-48ff-945d-b51cc484dea8 service nova] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Detach interface failed, port_id=bb884939-9aaf-474f-9246-eb279d11aa4e, reason: Instance d42d0818-1486-4696-9871-2cf989aeb885 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 900.160595] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.161318] env[65758]: DEBUG nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 900.164143] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.925s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.164386] env[65758]: DEBUG nova.objects.instance [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lazy-loading 'resources' on Instance uuid 85082b72-89dd-47b7-b8ad-f2ad5ad0638d {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.197758] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660669, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.056695} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.198053] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=65758) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 900.198878] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b45ecc-f003-443d-bfbd-080170c07cd1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.227514] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] e93528eb-33d0-46d1-94e8-d1d66f2c682f/ephemeral_0.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.228211] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b57d935-16dd-4914-ad53-6674ec7ebabe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.251047] env[65758]: DEBUG oslo_vmware.api [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660670, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202995} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.252550] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.252890] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.253243] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.253394] env[65758]: INFO nova.compute.manager [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 900.253661] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 900.253951] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 900.253951] env[65758]: value = "task-4660675" [ 900.253951] env[65758]: _type = "Task" [ 900.253951] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.254264] env[65758]: DEBUG nova.compute.manager [-] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 900.254390] env[65758]: DEBUG nova.network.neutron [-] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 900.254669] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 900.255290] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.255569] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.274996] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.310410] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660671, 'name': CreateVM_Task, 'duration_secs': 0.368436} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.310608] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.311163] env[65758]: WARNING neutronclient.v2_0.client [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
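The destroy sequence above (power off, UnregisterVM, DeleteDatastoreFile_Task, then polling task-4660674 to completion) is oslo.vmware's invoke-then-wait pattern. A minimal sketch of that pattern follows, assuming a reachable vCenter; the host name and credentials are placeholders rather than values from this deployment, and error handling is omitted.

# Sketch of the oslo.vmware invoke-then-wait pattern seen in the log above.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc1.example.test', 'administrator@vsphere.local', 'secret',  # placeholders
    api_retry_count=10, task_poll_interval=0.5)

# Find a Datacenter managed-object reference to scope the file operation.
retrieve_result = session.invoke_api(
    vim_util, 'get_objects', session.vim, 'Datacenter', 100)
dc_ref = retrieve_result.objects[0].obj

# FileManager.DeleteDatastoreFile_Task is asynchronous: it returns a Task
# managed object immediately, as the "Invoking ..." / "Waiting for the task"
# entries show.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task',
    session.vim.service_content.fileManager,
    name='[datastore2] 2d787237-26e5-4519-9f6e-1d30b9d016cf',
    datacenter=dc_ref)

# wait_for_task() polls the task state (the periodic "progress is N%" debug
# lines) until the task succeeds or raises on error.
session.wait_for_task(task)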
[ 900.311534] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.311679] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.312020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 900.312389] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5447f2c5-64cb-47e2-903e-6378c66e4ee4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.318427] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 900.318427] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d0c58d-b87b-6169-5dcc-009f673d62d9" [ 900.318427] env[65758]: _type = "Task" [ 900.318427] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.329552] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d0c58d-b87b-6169-5dcc-009f673d62d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.346037] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 900.349942] env[65758]: WARNING neutronclient.v2_0.client [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
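The image-cache handling above serialises workers on the cached image entry with oslo.concurrency locks before searching the datastore. A minimal sketch of that locking pattern, assuming oslo.concurrency is installed; the function body is illustrative, not Nova's code.

from oslo_concurrency import lockutils

CACHE_VMDK = ('[datastore1] devstack-image-cache_base/'
              '75a6399b-5100-4c51-b5cf-162bd505a28f')

def ensure_cached_image():
    # lockutils.lock() is a context manager; entering and leaving it produces
    # the "Acquiring lock" / "Acquired lock" / "Releasing lock" debug lines.
    # Nova additionally takes a file-backed external semaphore, which is what
    # the "Acquired external semaphore" entry refers to.
    with lockutils.lock(CACHE_VMDK):
        pass  # search the datastore; download the image only if it is missing

ensure_cached_image()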
[ 900.350959] env[65758]: WARNING openstack [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.350959] env[65758]: WARNING openstack [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.378258] env[65758]: INFO nova.compute.manager [-] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Took 1.59 seconds to deallocate network for instance. [ 900.407729] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660672, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08112} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.407967] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 900.408796] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab0ce28-48e6-4b27-ad54-9fba46e19095 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.432981] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] ba3153f2-8e6f-469c-8730-957c5eebe97b/ba3153f2-8e6f-469c-8730-957c5eebe97b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.432981] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f321873-2c25-4553-a0b5-c86ced7a1ec0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.455185] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 900.455185] env[65758]: value = "task-4660676" [ 900.455185] env[65758]: _type = "Task" [ 900.455185] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.463015] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660676, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.537763] env[65758]: DEBUG oslo_vmware.api [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Task: {'id': task-4660674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18941} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.538142] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.538438] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.538532] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.538779] env[65758]: INFO nova.compute.manager [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Took 1.15 seconds to destroy the instance on the hypervisor. [ 900.539117] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 900.539391] env[65758]: DEBUG nova.compute.manager [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 900.539504] env[65758]: DEBUG nova.network.neutron [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 900.539847] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
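The "Waiting for function ..._deallocate_network_with_retries to return." entries above come from an oslo.service looping call that retries Neutron deallocation until it succeeds. A minimal sketch of that control flow, assuming oslo.service; deallocate_network() is a hypothetical stand-in for the Neutron teardown, and Nova uses a backoff variant rather than the fixed-interval loop shown here.

from oslo_service import loopingcall

def deallocate_network():
    print('ports and allocations released')  # placeholder for Neutron teardown

def _deallocate_with_retries():
    try:
        deallocate_network()
    except Exception:
        return  # transient failure: the loop will call us again
    raise loopingcall.LoopingCallDone()  # success stops the loop

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=1, initial_delay=0).wait()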
[ 900.540574] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.541620] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.617523] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 900.671772] env[65758]: DEBUG nova.compute.utils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 900.677625] env[65758]: DEBUG nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 900.678178] env[65758]: DEBUG nova.network.neutron [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 900.678748] env[65758]: WARNING neutronclient.v2_0.client [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 900.679374] env[65758]: WARNING neutronclient.v2_0.client [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
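The repeated neutronclient warnings above recommend moving Neutron calls to the OpenStack SDK. A minimal sketch of an SDK-based port create and delete, assuming openstacksdk and a matching clouds.yaml entry; the cloud name is a placeholder, and the network id is the tempest network that appears later in this log.

import openstack

conn = openstack.connect(cloud='devstack-admin')  # placeholder cloud name

# create_port() issues the same POST /v2.0/ports request that the deprecated
# neutronclient binding would send.
port = conn.network.create_port(
    network_id='75a22867-6396-4a02-93c3-8e7c030e0af3',
    name='sdk-example-port')
print(port.id)

conn.network.delete_port(port)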
[ 900.680504] env[65758]: WARNING openstack [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.682691] env[65758]: WARNING openstack [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.695660] env[65758]: WARNING neutronclient.v2_0.client [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 900.697721] env[65758]: WARNING openstack [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 900.698295] env[65758]: WARNING openstack [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 900.750108] env[65758]: DEBUG nova.policy [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd07b5ba2c3ef430293fbf39148961763', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bad3e3c7054c424a800cb12e9c5dbb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 900.775639] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660675, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.831993] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d0c58d-b87b-6169-5dcc-009f673d62d9, 'name': SearchDatastore_Task, 'duration_secs': 0.015455} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.832268] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.832389] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.832785] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.832882] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.833320] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.833608] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4df047dc-fb39-4124-9f4c-c78983a1ebdf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.844642] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.844894] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.846027] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e090465-a6da-4824-a6fe-657c576148e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.852887] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 900.852887] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e686fd-5ddf-fd8e-518a-ebb922ca27af" [ 900.852887] env[65758]: _type = "Task" [ 900.852887] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.863041] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e686fd-5ddf-fd8e-518a-ebb922ca27af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.930162] env[65758]: DEBUG nova.network.neutron [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updated VIF entry in instance network info cache for port d93558c3-4a75-4e02-98d6-de4d1cf9dee2. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 900.930695] env[65758]: DEBUG nova.network.neutron [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updating instance_info_cache with network_info: [{"id": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "address": "fa:16:3e:08:ef:77", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93558c3-4a", "ovs_interfaceid": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 900.944355] env[65758]: INFO nova.compute.manager [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Took 0.57 seconds to detach 1 volumes for instance. 
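The cache lookups above all operate on paths of the form "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk". A minimal sketch, assuming oslo.vmware's datastore helpers, of composing and parsing such paths:

from oslo_vmware.objects import datastore as ds_obj

image_id = '75a6399b-5100-4c51-b5cf-162bd505a28f'

# Compose the cached VMDK path that the lock names and SearchDatastore_Task
# calls above refer to.
cache_vmdk = ds_obj.DatastorePath(
    'datastore1', 'devstack-image-cache_base', image_id, image_id + '.vmdk')
print(cache_vmdk)

# The same class splits a datastore path back into its datastore name and
# datastore-relative component.
parsed = ds_obj.DatastorePath.parse(str(cache_vmdk))
print(parsed.datastore, parsed.rel_path)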
[ 900.946697] env[65758]: DEBUG nova.compute.manager [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Deleting volume: 21f94ac1-a7a7-4e71-865b-3193eae1848e {{(pid=65758) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3320}} [ 900.966339] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660676, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.078791] env[65758]: DEBUG nova.network.neutron [-] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 901.174124] env[65758]: DEBUG nova.network.neutron [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Successfully created port: 875a4c06-92f4-4ace-ba63-e7014651800c {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 901.179149] env[65758]: DEBUG nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 901.281814] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660675, 'name': ReconfigVM_Task, 'duration_secs': 0.76514} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.282674] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Reconfigured VM instance instance-00000046 to attach disk [datastore1] e93528eb-33d0-46d1-94e8-d1d66f2c682f/ephemeral_0.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 901.283956] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f3bd0ae-f1c0-48cb-ad3d-88489e199ecc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.289703] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f86b4e-4bde-4e84-a17d-27e8572859a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.300393] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d5d2d9-e42c-4835-af72-b9881cc71208 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.308018] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 901.308018] env[65758]: value = "task-4660678" [ 901.308018] env[65758]: _type = "Task" [ 901.308018] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.349077] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250cb405-2d0e-4474-a6e3-c075be010492 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.352338] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660678, 'name': Rename_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.361110] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce80c4a-febc-4673-9308-90262f93dee7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.373688] env[65758]: DEBUG nova.network.neutron [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 901.379029] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e686fd-5ddf-fd8e-518a-ebb922ca27af, 'name': SearchDatastore_Task, 'duration_secs': 0.020211} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.379029] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10c9bb29-6e5d-47df-83e4-d0cdaed02fe7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.396363] env[65758]: DEBUG nova.compute.provider_tree [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.404761] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 901.404761] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529fdb76-061c-00eb-2b79-d802d1d25971" [ 901.404761] env[65758]: _type = "Task" [ 901.404761] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.417551] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529fdb76-061c-00eb-2b79-d802d1d25971, 'name': SearchDatastore_Task, 'duration_secs': 0.010859} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.417885] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.418205] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 875cbc88-f817-4ea8-a969-b97e875918d1/875cbc88-f817-4ea8-a969-b97e875918d1.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 901.418499] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df321bdc-730c-42ed-bdf5-c2424206eb12 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.426528] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 901.426528] env[65758]: value = "task-4660679" [ 901.426528] env[65758]: _type = "Task" [ 901.426528] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.437817] env[65758]: DEBUG oslo_concurrency.lockutils [req-a7b47ca2-9633-48e7-aba6-e91bdfb8cfd0 req-5905fed8-3531-4a85-b8ae-f4a916b3ebe1 service nova] Releasing lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.438378] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.466293] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660676, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.517438] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.581779] env[65758]: INFO nova.compute.manager [-] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Took 1.33 seconds to deallocate network for instance. [ 901.818637] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660678, 'name': Rename_Task, 'duration_secs': 0.208412} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.819059] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 901.819397] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-287d0579-d379-432c-8e7f-8787a01b0f02 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.827465] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 901.827465] env[65758]: value = "task-4660680" [ 901.827465] env[65758]: _type = "Task" [ 901.827465] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.838847] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660680, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.877301] env[65758]: INFO nova.compute.manager [-] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Took 1.34 seconds to deallocate network for instance. [ 901.882439] env[65758]: DEBUG nova.compute.manager [req-753d4b32-93cd-45e4-bbff-f4cce4aa6c47 req-43856975-ee83-4c20-aa38-16e5267c9f46 service nova] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Received event network-vif-deleted-ea073371-1ad8-47ae-9cca-67a419a8e219 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 901.882705] env[65758]: DEBUG nova.compute.manager [req-753d4b32-93cd-45e4-bbff-f4cce4aa6c47 req-43856975-ee83-4c20-aa38-16e5267c9f46 service nova] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Received event network-vif-deleted-df4cf195-46a9-4de5-ae34-2363de4377f0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 901.901638] env[65758]: DEBUG nova.scheduler.client.report [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.942653] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660679, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.967104] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660676, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.088793] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.195291] env[65758]: DEBUG nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 902.248200] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 902.248424] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.248562] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 902.248820] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.248972] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 902.249140] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 902.249353] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 902.249511] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 902.249674] env[65758]: DEBUG nova.virt.hardware [None 
req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 902.249834] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 902.250013] env[65758]: DEBUG nova.virt.hardware [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 902.251014] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a61dab8-6419-4078-af6f-75a8b1e3bcb8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.262403] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048f8fa7-59c5-458a-940f-68c9050e1229 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.339881] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660680, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.388390] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.408448] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.244s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.411541] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.095s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.413056] env[65758]: INFO nova.compute.claims [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.434574] env[65758]: INFO nova.scheduler.client.report [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Deleted allocations for instance 85082b72-89dd-47b7-b8ad-f2ad5ad0638d [ 902.443909] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660679, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658087} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.443909] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 875cbc88-f817-4ea8-a969-b97e875918d1/875cbc88-f817-4ea8-a969-b97e875918d1.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.443909] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.444258] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2debbb5-b0fb-4ac6-9048-919ab25a8380 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.452345] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 902.452345] env[65758]: value = "task-4660681" [ 902.452345] env[65758]: _type = "Task" [ 902.452345] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.469302] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660681, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.473948] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660676, 'name': ReconfigVM_Task, 'duration_secs': 1.698} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.473948] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Reconfigured VM instance instance-00000047 to attach disk [datastore2] ba3153f2-8e6f-469c-8730-957c5eebe97b/ba3153f2-8e6f-469c-8730-957c5eebe97b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.474230] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9dc72e89-9e42-405f-8f8a-670e02c61cb2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.484344] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 902.484344] env[65758]: value = "task-4660682" [ 902.484344] env[65758]: _type = "Task" [ 902.484344] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.499162] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660682, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.760574] env[65758]: DEBUG nova.network.neutron [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Successfully updated port: 875a4c06-92f4-4ace-ba63-e7014651800c {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 902.839098] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660680, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.946879] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0229fc89-e905-437e-83b2-e9702d9a126a tempest-AttachInterfacesV270Test-560008983 tempest-AttachInterfacesV270Test-560008983-project-member] Lock "85082b72-89dd-47b7-b8ad-f2ad5ad0638d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.370s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.963895] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660681, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07493} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.964240] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.965290] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107ba5f4-e4d3-4291-984f-dbb97cf2868e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.989635] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 875cbc88-f817-4ea8-a969-b97e875918d1/875cbc88-f817-4ea8-a969-b97e875918d1.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.990365] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72396bec-b2ce-4a4b-a160-d17fd36efe3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.016964] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660682, 'name': Rename_Task, 'duration_secs': 0.175558} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.019083] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.019083] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 903.019083] env[65758]: value = "task-4660683" [ 903.019083] env[65758]: _type = "Task" [ 903.019083] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.019439] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08e6d7e5-5127-4e0b-b4e1-71f309e79916 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.033810] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 903.033810] env[65758]: value = "task-4660684" [ 903.033810] env[65758]: _type = "Task" [ 903.033810] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.034096] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660683, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.045550] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660684, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.264478] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-12c27fac-98e9-486d-bf36-0580a4e0a163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.264478] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-12c27fac-98e9-486d-bf36-0580a4e0a163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.264478] env[65758]: DEBUG nova.network.neutron [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 903.340812] env[65758]: DEBUG oslo_vmware.api [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660680, 'name': PowerOnVM_Task, 'duration_secs': 1.323586} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.341222] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.341475] env[65758]: INFO nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Took 11.49 seconds to spawn the instance on the hypervisor. 
[ 903.341717] env[65758]: DEBUG nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 903.342684] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94564a63-23ea-4c6f-a23e-bc819b654564 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.536473] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660683, 'name': ReconfigVM_Task, 'duration_secs': 0.418811} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.545982] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 875cbc88-f817-4ea8-a969-b97e875918d1/875cbc88-f817-4ea8-a969-b97e875918d1.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.548101] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4404168c-0948-44fc-9959-43fad8c64eee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.559350] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660684, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.566046] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 903.566046] env[65758]: value = "task-4660685" [ 903.566046] env[65758]: _type = "Task" [ 903.566046] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.576619] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660685, 'name': Rename_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.771353] env[65758]: WARNING openstack [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 903.771972] env[65758]: WARNING openstack [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 903.829385] env[65758]: DEBUG nova.network.neutron [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 903.870580] env[65758]: INFO nova.compute.manager [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Took 42.69 seconds to build instance. [ 903.993653] env[65758]: WARNING neutronclient.v2_0.client [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 903.993653] env[65758]: WARNING openstack [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 903.993653] env[65758]: WARNING openstack [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 904.028102] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1f206c-0e4a-429e-8e46-b770ca3316a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.041430] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15a1c84-2736-475d-9cac-fd32e2a5f4f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.051610] env[65758]: DEBUG oslo_vmware.api [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660684, 'name': PowerOnVM_Task, 'duration_secs': 0.595909} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.086672] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.086920] env[65758]: INFO nova.compute.manager [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Took 9.55 seconds to spawn the instance on the hypervisor. 
[ 904.087116] env[65758]: DEBUG nova.compute.manager [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 904.092636] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664a3195-2544-41b7-a287-f05aa974aef2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.098565] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9d4783-ef99-4dfe-bcd7-d1cc5ef0c3f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.107309] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660685, 'name': Rename_Task, 'duration_secs': 0.222096} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.110800] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.114065] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-012f0101-1e01-46b2-8096-30d1410da94f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.117093] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03574861-07ee-4fae-9678-5339d7892f7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.132981] env[65758]: DEBUG nova.compute.provider_tree [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.138545] env[65758]: DEBUG nova.compute.manager [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Received event network-vif-plugged-875a4c06-92f4-4ace-ba63-e7014651800c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 904.138545] env[65758]: DEBUG oslo_concurrency.lockutils [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Acquiring lock "12c27fac-98e9-486d-bf36-0580a4e0a163-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.138545] env[65758]: DEBUG oslo_concurrency.lockutils [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Lock "12c27fac-98e9-486d-bf36-0580a4e0a163-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.138545] env[65758]: DEBUG oslo_concurrency.lockutils [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Lock "12c27fac-98e9-486d-bf36-0580a4e0a163-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.138805] env[65758]: DEBUG nova.compute.manager [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] No waiting events found dispatching network-vif-plugged-875a4c06-92f4-4ace-ba63-e7014651800c {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 904.138805] env[65758]: WARNING nova.compute.manager [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Received unexpected event network-vif-plugged-875a4c06-92f4-4ace-ba63-e7014651800c for instance with vm_state building and task_state spawning. [ 904.138936] env[65758]: DEBUG nova.compute.manager [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Received event network-changed-875a4c06-92f4-4ace-ba63-e7014651800c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 904.139096] env[65758]: DEBUG nova.compute.manager [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Refreshing instance network info cache due to event network-changed-875a4c06-92f4-4ace-ba63-e7014651800c. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 904.139261] env[65758]: DEBUG oslo_concurrency.lockutils [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Acquiring lock "refresh_cache-12c27fac-98e9-486d-bf36-0580a4e0a163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.139875] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 904.139875] env[65758]: value = "task-4660686" [ 904.139875] env[65758]: _type = "Task" [ 904.139875] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.151507] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660686, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.153969] env[65758]: DEBUG nova.network.neutron [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Updating instance_info_cache with network_info: [{"id": "875a4c06-92f4-4ace-ba63-e7014651800c", "address": "fa:16:3e:90:64:f0", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap875a4c06-92", "ovs_interfaceid": "875a4c06-92f4-4ace-ba63-e7014651800c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 904.373678] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6b6c6ba5-7805-4432-b5a3-3d809020a9c9 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.585s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.631038] env[65758]: INFO nova.compute.manager [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Took 42.58 seconds to build instance. 
[ 904.641517] env[65758]: DEBUG nova.scheduler.client.report [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 904.658646] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-12c27fac-98e9-486d-bf36-0580a4e0a163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.658998] env[65758]: DEBUG nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Instance network_info: |[{"id": "875a4c06-92f4-4ace-ba63-e7014651800c", "address": "fa:16:3e:90:64:f0", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap875a4c06-92", "ovs_interfaceid": "875a4c06-92f4-4ace-ba63-e7014651800c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 904.659957] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660686, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.659957] env[65758]: DEBUG oslo_concurrency.lockutils [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Acquired lock "refresh_cache-12c27fac-98e9-486d-bf36-0580a4e0a163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.659957] env[65758]: DEBUG nova.network.neutron [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Refreshing network info cache for port 875a4c06-92f4-4ace-ba63-e7014651800c {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 904.660985] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:64:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '875a4c06-92f4-4ace-ba63-e7014651800c', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 904.670310] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating folder: Project (bad3e3c7054c424a800cb12e9c5dbb31). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 904.671695] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3472a4a3-83b4-4e69-b6e7-0b36fa81d898 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.685251] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Created folder: Project (bad3e3c7054c424a800cb12e9c5dbb31) in parent group-v909763. [ 904.685458] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating folder: Instances. Parent ref: group-v909955. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 904.685724] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf4d5d61-472d-4361-b8d2-ccb1bfebd42a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.700413] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Created folder: Instances in parent group-v909955. [ 904.700976] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 904.701147] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 904.701748] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91e1ec36-931d-4390-b33a-da83b8dfa328 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.729231] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 904.729231] env[65758]: value = "task-4660689" [ 904.729231] env[65758]: _type = "Task" [ 904.729231] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.738856] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660689, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.131589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f7b9289f-ba46-4e8d-a38a-ce17dd20e826 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.833s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.147010] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.736s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.147737] env[65758]: DEBUG nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 905.156518] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.447s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.156518] env[65758]: DEBUG nova.objects.instance [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lazy-loading 'resources' on Instance uuid cca3e019-8e82-4473-8609-291703762a6e {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.167745] env[65758]: DEBUG oslo_vmware.api [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660686, 'name': PowerOnVM_Task, 'duration_secs': 0.838262} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.169184] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.169184] env[65758]: INFO nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Took 7.84 seconds to spawn the instance on the hypervisor. [ 905.169745] env[65758]: DEBUG nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 905.170235] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb172fcd-fe2e-49ed-a0fd-b0f028463d26 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.174894] env[65758]: WARNING neutronclient.v2_0.client [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 905.175737] env[65758]: WARNING openstack [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 905.176051] env[65758]: WARNING openstack [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 905.244576] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660689, 'name': CreateVM_Task, 'duration_secs': 0.497471} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.244780] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.245333] env[65758]: WARNING neutronclient.v2_0.client [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 905.245700] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.245851] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.246247] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 905.247453] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a24c1f90-3b05-48e6-9464-934a30f5ec59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.255601] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 905.255601] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52160b60-a929-e00e-a275-cb4ec5212953" [ 905.255601] env[65758]: _type = "Task" [ 905.255601] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.266577] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52160b60-a929-e00e-a275-cb4ec5212953, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.438476] env[65758]: WARNING neutronclient.v2_0.client [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 905.439171] env[65758]: WARNING openstack [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 905.439533] env[65758]: WARNING openstack [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 905.552170] env[65758]: DEBUG nova.network.neutron [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Updated VIF entry in instance network info cache for port 875a4c06-92f4-4ace-ba63-e7014651800c. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 905.552552] env[65758]: DEBUG nova.network.neutron [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Updating instance_info_cache with network_info: [{"id": "875a4c06-92f4-4ace-ba63-e7014651800c", "address": "fa:16:3e:90:64:f0", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap875a4c06-92", "ovs_interfaceid": "875a4c06-92f4-4ace-ba63-e7014651800c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 905.654316] env[65758]: DEBUG nova.compute.utils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 905.655914] env[65758]: DEBUG nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 905.656147] env[65758]: DEBUG nova.network.neutron [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 905.656495] env[65758]: WARNING neutronclient.v2_0.client [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 905.656808] env[65758]: WARNING neutronclient.v2_0.client [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 905.657510] env[65758]: WARNING openstack [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 905.657853] env[65758]: WARNING openstack [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 905.708792] env[65758]: INFO nova.compute.manager [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Took 41.04 seconds to build instance. [ 905.736025] env[65758]: DEBUG nova.policy [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25478cf565774182b96cdb93cc82f50e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68c10a44fcb84ade915f8dcabbb9a8c7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 905.769620] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52160b60-a929-e00e-a275-cb4ec5212953, 'name': SearchDatastore_Task, 'duration_secs': 0.032766} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.769620] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.769844] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.770093] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.770238] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.770532] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.770687] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-caa06d6a-f9f1-4b5e-ac81-112655993461 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.782708] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.782944] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 905.783740] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebbd8f7b-3676-4f31-b7b8-8d6e8aa688c7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.791237] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 905.791237] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5218bc17-6993-8c05-434e-d9b13da4b5a9" [ 905.791237] env[65758]: _type = "Task" [ 905.791237] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.800605] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5218bc17-6993-8c05-434e-d9b13da4b5a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.057203] env[65758]: DEBUG oslo_concurrency.lockutils [req-6f77c468-1186-46ce-957b-7cd706d0edb6 req-b161d70c-57ac-46a5-a4cb-f74f84fef7de service nova] Releasing lock "refresh_cache-12c27fac-98e9-486d-bf36-0580a4e0a163" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.166801] env[65758]: DEBUG nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 906.187308] env[65758]: DEBUG nova.network.neutron [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Successfully created port: 76336f1a-f792-496d-bde3-e6466ebd059d {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 906.210805] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a70d39c-0fde-46d9-9371-7867ea7771d8 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "875cbc88-f817-4ea8-a969-b97e875918d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.358s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.224038] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aef7162-2af0-459e-9e1b-8e7e87a787ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.232950] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5031b879-792b-4b0f-9f51-8fa1c43eaff3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.270310] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afa6fca-ca58-40c2-bf77-582bd8d68e76 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.278832] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b8e9b8-4397-449d-b1fa-a070a77a7d94 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.294640] env[65758]: DEBUG nova.compute.provider_tree [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.306348] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5218bc17-6993-8c05-434e-d9b13da4b5a9, 'name': SearchDatastore_Task, 'duration_secs': 0.011917} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.308072] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0d66517-16c8-42cd-bc1b-1c3a792431aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.316766] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 906.316766] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f925a1-aa99-2824-4b7b-ed6e0b91bff4" [ 906.316766] env[65758]: _type = "Task" [ 906.316766] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.326950] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f925a1-aa99-2824-4b7b-ed6e0b91bff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.609994] env[65758]: DEBUG nova.compute.manager [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Received event network-changed-25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 906.609994] env[65758]: DEBUG nova.compute.manager [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Refreshing instance network info cache due to event network-changed-25549e11-fab5-4462-b69b-5fa3581f6d34. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 906.609994] env[65758]: DEBUG oslo_concurrency.lockutils [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] Acquiring lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.609994] env[65758]: DEBUG oslo_concurrency.lockutils [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] Acquired lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.609994] env[65758]: DEBUG nova.network.neutron [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Refreshing network info cache for port 25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 906.801502] env[65758]: DEBUG nova.scheduler.client.report [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.829848] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f925a1-aa99-2824-4b7b-ed6e0b91bff4, 'name': SearchDatastore_Task, 'duration_secs': 0.014283} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.830082] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.830339] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 12c27fac-98e9-486d-bf36-0580a4e0a163/12c27fac-98e9-486d-bf36-0580a4e0a163.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.830613] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed8e724a-45f3-40cc-a5bf-a398062ada8e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.838837] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 906.838837] env[65758]: value = "task-4660690" [ 906.838837] env[65758]: _type = "Task" [ 906.838837] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.851953] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.116761] env[65758]: WARNING neutronclient.v2_0.client [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 907.117496] env[65758]: WARNING openstack [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 907.117901] env[65758]: WARNING openstack [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 907.181304] env[65758]: DEBUG nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 907.216441] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 907.216441] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 907.216441] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 907.216441] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 907.216673] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 907.217364] env[65758]: DEBUG 
nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 907.217636] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.217827] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 907.218053] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 907.218262] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 907.218468] env[65758]: DEBUG nova.virt.hardware [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 907.219634] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6cd060-7859-4393-b635-3d558a964bf8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.235336] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce418c43-4302-4267-875c-270b748ea4a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.309317] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.152s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.310054] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.211s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.310328] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None 
None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.310529] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 907.310938] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 31.027s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.313373] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0986a9ab-a8e2-440f-9e9e-ac22b6f6584f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.323633] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9948036e-9e01-4a42-b12f-8a7425b59f92 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.344718] env[65758]: INFO nova.scheduler.client.report [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Deleted allocations for instance cca3e019-8e82-4473-8609-291703762a6e [ 907.353323] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8106f6a2-1d4f-4ba9-9917-3cb23d386a3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.371417] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660690, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.374828] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce33e6d2-8fda-4604-8e00-95abdec9477f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.412102] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177532MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 907.412270] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.413825] env[65758]: WARNING neutronclient.v2_0.client [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 907.414509] env[65758]: WARNING openstack [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 907.414858] env[65758]: WARNING openstack [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 907.519154] env[65758]: DEBUG nova.network.neutron [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Updated VIF entry in instance network info cache for port 25549e11-fab5-4462-b69b-5fa3581f6d34. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 907.519154] env[65758]: DEBUG nova.network.neutron [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Updating instance_info_cache with network_info: [{"id": "25549e11-fab5-4462-b69b-5fa3581f6d34", "address": "fa:16:3e:8d:f4:ce", "network": {"id": "3770aad6-39a0-41da-84d1-b6aa69c0dfad", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-982589002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45aad313d10447e9ba61ed0a05b915ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25549e11-fa", "ovs_interfaceid": "25549e11-fab5-4462-b69b-5fa3581f6d34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 907.861317] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.988532} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.861658] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 12c27fac-98e9-486d-bf36-0580a4e0a163/12c27fac-98e9-486d-bf36-0580a4e0a163.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 907.861911] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 907.864503] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-254a8f2c-cd4b-43a5-b68a-1ce8d5d2e5fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.868016] env[65758]: DEBUG oslo_concurrency.lockutils [None req-647767de-92b9-4c83-b82d-8e3d583b48ce tempest-FloatingIPsAssociationTestJSON-1968318318 tempest-FloatingIPsAssociationTestJSON-1968318318-project-member] Lock "cca3e019-8e82-4473-8609-291703762a6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.293s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.872257] env[65758]: DEBUG nova.network.neutron [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Successfully updated port: 76336f1a-f792-496d-bde3-e6466ebd059d {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 907.889238] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 907.889238] env[65758]: value = "task-4660691" [ 907.889238] env[65758]: _type = "Task" [ 907.889238] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.900964] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660691, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.020712] env[65758]: DEBUG oslo_concurrency.lockutils [req-e560764f-ad09-4f33-ad22-60e5bae4e479 req-6f1ced70-18a3-472b-8768-274528fb483d service nova] Releasing lock "refresh_cache-e93528eb-33d0-46d1-94e8-d1d66f2c682f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.090358] env[65758]: DEBUG nova.compute.manager [req-4d427661-e8b0-400a-acba-9197312a50bc req-d67505ba-63cd-4333-bf6d-8bb35dea77e2 service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Received event network-vif-plugged-76336f1a-f792-496d-bde3-e6466ebd059d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 908.090798] env[65758]: DEBUG oslo_concurrency.lockutils [req-4d427661-e8b0-400a-acba-9197312a50bc req-d67505ba-63cd-4333-bf6d-8bb35dea77e2 service nova] Acquiring lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.090875] env[65758]: DEBUG oslo_concurrency.lockutils [req-4d427661-e8b0-400a-acba-9197312a50bc req-d67505ba-63cd-4333-bf6d-8bb35dea77e2 service nova] Lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.091047] env[65758]: DEBUG oslo_concurrency.lockutils [req-4d427661-e8b0-400a-acba-9197312a50bc req-d67505ba-63cd-4333-bf6d-8bb35dea77e2 service nova] Lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.091214] env[65758]: DEBUG nova.compute.manager [req-4d427661-e8b0-400a-acba-9197312a50bc req-d67505ba-63cd-4333-bf6d-8bb35dea77e2 service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] No waiting events found dispatching network-vif-plugged-76336f1a-f792-496d-bde3-e6466ebd059d {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 908.091371] env[65758]: WARNING nova.compute.manager [req-4d427661-e8b0-400a-acba-9197312a50bc req-d67505ba-63cd-4333-bf6d-8bb35dea77e2 service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Received unexpected event network-vif-plugged-76336f1a-f792-496d-bde3-e6466ebd059d for instance with vm_state building and task_state spawning. 
[ 908.383216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquiring lock "refresh_cache-aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.383506] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquired lock "refresh_cache-aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.384132] env[65758]: DEBUG nova.network.neutron [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 908.385851] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af619c03-90bc-4fd2-959b-b7995c6f887f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.407789] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8be7550-d0b7-4c94-b4aa-e9d239d7cfd8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.412411] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121268} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.412718] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.413978] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9f857f-eb8e-4381-977d-e10bd4b6344f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.449793] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3f9cfa-47ba-44ab-b8c3-dc8de789f5bc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.473961] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 12c27fac-98e9-486d-bf36-0580a4e0a163/12c27fac-98e9-486d-bf36-0580a4e0a163.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.474896] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09c053e9-c0c7-4105-a75b-7f7f9598f037 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.494295] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17716cb6-8f00-489b-a522-0d54ea2e0607 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.499648] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 908.499648] env[65758]: value = "task-4660692" [ 908.499648] env[65758]: _type = "Task" [ 908.499648] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.517458] env[65758]: DEBUG nova.compute.provider_tree [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.521703] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660692, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.641303] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "875cbc88-f817-4ea8-a969-b97e875918d1" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.641562] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "875cbc88-f817-4ea8-a969-b97e875918d1" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.641796] env[65758]: INFO nova.compute.manager [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Rebooting instance [ 908.659548] env[65758]: DEBUG nova.compute.manager [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received event network-changed-cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 908.659548] env[65758]: DEBUG nova.compute.manager [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Refreshing instance network info cache due to event network-changed-cdcc66de-e599-4e26-8757-617493c55e00. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 908.659662] env[65758]: DEBUG oslo_concurrency.lockutils [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Acquiring lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.660019] env[65758]: DEBUG oslo_concurrency.lockutils [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Acquired lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.660019] env[65758]: DEBUG nova.network.neutron [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Refreshing network info cache for port cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 908.894488] env[65758]: WARNING openstack [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 908.894926] env[65758]: WARNING openstack [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 909.015762] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660692, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.017143] env[65758]: DEBUG nova.network.neutron [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 909.023551] env[65758]: DEBUG nova.scheduler.client.report [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.112298] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.112502] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.118816] env[65758]: WARNING neutronclient.v2_0.client [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 909.119462] env[65758]: WARNING openstack [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 909.119814] env[65758]: WARNING openstack [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 909.162251] env[65758]: WARNING neutronclient.v2_0.client [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 909.162949] env[65758]: WARNING openstack [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 909.163377] env[65758]: WARNING openstack [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 909.171183] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.171389] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquired lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.171560] env[65758]: DEBUG nova.network.neutron [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 909.253284] env[65758]: DEBUG nova.network.neutron [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Updating instance_info_cache with network_info: [{"id": "76336f1a-f792-496d-bde3-e6466ebd059d", "address": "fa:16:3e:63:28:79", "network": {"id": "36c4d0cb-46e4-44bc-9040-b11274c33a24", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2123130903-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68c10a44fcb84ade915f8dcabbb9a8c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76336f1a-f7", "ovs_interfaceid": "76336f1a-f792-496d-bde3-e6466ebd059d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 909.515844] 
env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660692, 'name': ReconfigVM_Task, 'duration_secs': 1.014282} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.516139] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 12c27fac-98e9-486d-bf36-0580a4e0a163/12c27fac-98e9-486d-bf36-0580a4e0a163.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.516743] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7540f77b-4780-49cc-8b07-90a868b6f0c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.525107] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 909.525107] env[65758]: value = "task-4660693" [ 909.525107] env[65758]: _type = "Task" [ 909.525107] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.529372] env[65758]: WARNING neutronclient.v2_0.client [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 909.530011] env[65758]: WARNING openstack [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 909.530643] env[65758]: WARNING openstack [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 909.549036] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660693, 'name': Rename_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.619080] env[65758]: DEBUG nova.compute.utils [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 909.635079] env[65758]: DEBUG nova.network.neutron [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updated VIF entry in instance network info cache for port cdcc66de-e599-4e26-8757-617493c55e00. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 909.635434] env[65758]: DEBUG nova.network.neutron [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [{"id": "cdcc66de-e599-4e26-8757-617493c55e00", "address": "fa:16:3e:6f:f2:e7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcc66de-e5", "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 909.673719] env[65758]: WARNING neutronclient.v2_0.client [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 909.674584] env[65758]: WARNING openstack [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 909.675094] env[65758]: WARNING openstack [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 909.760310] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Releasing lock "refresh_cache-aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.760749] env[65758]: DEBUG nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Instance network_info: |[{"id": "76336f1a-f792-496d-bde3-e6466ebd059d", "address": "fa:16:3e:63:28:79", "network": {"id": "36c4d0cb-46e4-44bc-9040-b11274c33a24", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2123130903-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68c10a44fcb84ade915f8dcabbb9a8c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76336f1a-f7", "ovs_interfaceid": "76336f1a-f792-496d-bde3-e6466ebd059d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 909.761432] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:28:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76336f1a-f792-496d-bde3-e6466ebd059d', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 909.770727] env[65758]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Creating folder: Project (68c10a44fcb84ade915f8dcabbb9a8c7). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 909.771103] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0738b50-6d9b-4327-b9cb-f43794ae4bef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.786805] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Created folder: Project (68c10a44fcb84ade915f8dcabbb9a8c7) in parent group-v909763. [ 909.786805] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Creating folder: Instances. Parent ref: group-v909958. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 909.786805] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5f88a02-f77d-4bb8-a0b2-4d6dc57515bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.802193] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Created folder: Instances in parent group-v909958. [ 909.802487] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 909.802739] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 909.802987] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77c38604-05d4-418d-a96a-88fa7cacf47f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.823302] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 909.823302] env[65758]: value = "task-4660696" [ 909.823302] env[65758]: _type = "Task" [ 909.823302] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.832492] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660696, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.948815] env[65758]: WARNING neutronclient.v2_0.client [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 909.949664] env[65758]: WARNING openstack [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 909.951630] env[65758]: WARNING openstack [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 910.036709] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660693, 'name': Rename_Task, 'duration_secs': 0.269367} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.037115] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 910.037487] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7f984a9-9932-4350-a8ab-2db5023c9939 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.045567] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.735s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.048599] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 910.048599] env[65758]: value = "task-4660697" [ 910.048599] env[65758]: _type = "Task" [ 910.048599] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.051180] env[65758]: DEBUG nova.network.neutron [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updating instance_info_cache with network_info: [{"id": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "address": "fa:16:3e:08:ef:77", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93558c3-4a", "ovs_interfaceid": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 910.053039] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.648s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.053039] env[65758]: DEBUG nova.objects.instance [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lazy-loading 'resources' on Instance uuid f15c6953-f76b-44eb-bd1b-c0d3adddc163 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 910.063779] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660697, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.121841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.138743] env[65758]: DEBUG oslo_concurrency.lockutils [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Releasing lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.139138] env[65758]: DEBUG nova.compute.manager [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Received event network-changed-d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 910.139363] env[65758]: DEBUG nova.compute.manager [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Refreshing instance network info cache due to event network-changed-d93558c3-4a75-4e02-98d6-de4d1cf9dee2. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 910.139606] env[65758]: DEBUG oslo_concurrency.lockutils [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Acquiring lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.333981] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660696, 'name': CreateVM_Task, 'duration_secs': 0.469453} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.334235] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 910.334705] env[65758]: WARNING neutronclient.v2_0.client [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 910.335083] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.335380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.335715] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 910.336026] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0504c82b-8c7f-410e-9eca-4b5b0a8c1118 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.342713] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 910.342713] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5234ea31-59a6-28d4-9874-dffd67502376" [ 910.342713] env[65758]: _type = "Task" [ 910.342713] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.353158] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5234ea31-59a6-28d4-9874-dffd67502376, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.557988] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Releasing lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.562796] env[65758]: DEBUG oslo_concurrency.lockutils [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Acquired lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.563040] env[65758]: DEBUG nova.network.neutron [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Refreshing network info cache for port d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 910.571070] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660697, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.618046] env[65758]: INFO nova.scheduler.client.report [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted allocation for migration cbce059b-48af-4be4-a4d3-19366314e65f [ 910.858767] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5234ea31-59a6-28d4-9874-dffd67502376, 'name': SearchDatastore_Task, 'duration_secs': 0.020497} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.861399] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.861399] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.861399] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.861399] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.861399] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.862963] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d863fcd0-8734-4da9-a456-14018e3f74d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.866130] env[65758]: DEBUG nova.compute.manager [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Received event network-changed-76336f1a-f792-496d-bde3-e6466ebd059d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 910.866335] env[65758]: DEBUG nova.compute.manager [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Refreshing instance network info cache due to event network-changed-76336f1a-f792-496d-bde3-e6466ebd059d. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 910.866546] env[65758]: DEBUG oslo_concurrency.lockutils [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] Acquiring lock "refresh_cache-aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.866696] env[65758]: DEBUG oslo_concurrency.lockutils [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] Acquired lock "refresh_cache-aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.866920] env[65758]: DEBUG nova.network.neutron [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Refreshing network info cache for port 76336f1a-f792-496d-bde3-e6466ebd059d {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 910.878719] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.878915] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.880088] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d11d7b9-4845-4365-898d-224eb4039b9a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.891886] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 910.891886] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523bc828-0911-77e3-85e1-f4fe3cfa2199" [ 910.891886] env[65758]: _type = "Task" [ 910.891886] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.901762] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523bc828-0911-77e3-85e1-f4fe3cfa2199, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.073878] env[65758]: WARNING neutronclient.v2_0.client [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 911.074685] env[65758]: WARNING openstack [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 911.075268] env[65758]: WARNING openstack [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 911.082772] env[65758]: DEBUG oslo_vmware.api [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660697, 'name': PowerOnVM_Task, 'duration_secs': 0.71557} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.086146] env[65758]: DEBUG nova.compute.manager [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 911.087034] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 911.087034] env[65758]: INFO nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Took 8.89 seconds to spawn the instance on the hypervisor. 
[ 911.087034] env[65758]: DEBUG nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 911.087756] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e237d179-5ef9-4c6f-a2ca-d3a872862f20 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.091726] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18400fb-9ee4-4bcb-92ed-f9fd65a8038c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.126388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e7f49d34-a037-439b-be4e-0959fa7ea05e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 38.339s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.130321] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa712c0a-7cbc-4ce0-9a29-1b76b0bb4b45 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.140823] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f597d6d2-de50-4583-bd83-2cd4dcce05ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.182066] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38db5bf-ca90-4fe6-a610-5a203f619d6b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.197195] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc4056e-0bd6-47de-97ae-bc824c386717 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.214062] env[65758]: DEBUG nova.compute.provider_tree [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.233926] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.234241] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" acquired by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.234871] env[65758]: INFO nova.compute.manager [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Attaching volume fbd302a4-8737-4848-94c0-7cfc81983fb5 to /dev/sdb [ 911.280517] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1ce480-4dc8-460d-b6a0-a69df214f8eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.289269] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e677eb-3696-4ef3-8a62-b9efbaa5a61b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.306332] env[65758]: DEBUG nova.virt.block_device [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updating existing volume attachment record: 4f7c6880-75ac-474f-a85e-239893d3f1dc {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 911.309611] env[65758]: WARNING neutronclient.v2_0.client [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 911.310321] env[65758]: WARNING openstack [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 911.310680] env[65758]: WARNING openstack [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 911.370372] env[65758]: WARNING neutronclient.v2_0.client [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 911.371041] env[65758]: WARNING openstack [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 911.371419] env[65758]: WARNING openstack [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 911.402042] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523bc828-0911-77e3-85e1-f4fe3cfa2199, 'name': SearchDatastore_Task, 'duration_secs': 0.015662} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.403131] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-575a2516-3c3e-45b1-91ba-3305a8fb1da5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.409906] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 911.409906] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52502808-c616-521d-a211-5981959ccbc4" [ 911.409906] env[65758]: _type = "Task" [ 911.409906] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.418880] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52502808-c616-521d-a211-5981959ccbc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.421841] env[65758]: DEBUG nova.network.neutron [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updated VIF entry in instance network info cache for port d93558c3-4a75-4e02-98d6-de4d1cf9dee2. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 911.422232] env[65758]: DEBUG nova.network.neutron [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updating instance_info_cache with network_info: [{"id": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "address": "fa:16:3e:08:ef:77", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93558c3-4a", "ovs_interfaceid": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 911.602188] env[65758]: WARNING neutronclient.v2_0.client [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 911.603229] env[65758]: WARNING openstack [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 911.603366] env[65758]: WARNING openstack [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 911.632220] env[65758]: INFO nova.compute.manager [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Took 40.49 seconds to build instance. 
[ 911.719071] env[65758]: DEBUG nova.scheduler.client.report [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 911.749690] env[65758]: DEBUG nova.network.neutron [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Updated VIF entry in instance network info cache for port 76336f1a-f792-496d-bde3-e6466ebd059d. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 911.750088] env[65758]: DEBUG nova.network.neutron [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Updating instance_info_cache with network_info: [{"id": "76336f1a-f792-496d-bde3-e6466ebd059d", "address": "fa:16:3e:63:28:79", "network": {"id": "36c4d0cb-46e4-44bc-9040-b11274c33a24", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2123130903-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68c10a44fcb84ade915f8dcabbb9a8c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76336f1a-f7", "ovs_interfaceid": "76336f1a-f792-496d-bde3-e6466ebd059d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 911.923829] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52502808-c616-521d-a211-5981959ccbc4, 'name': SearchDatastore_Task, 'duration_secs': 0.010421} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.924186] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.924486] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] aa2f1858-2bb2-4f12-bc05-ef6913ef36e2/aa2f1858-2bb2-4f12-bc05-ef6913ef36e2.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 911.924820] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f9dd0bf-30f7-47b4-84d6-e094e22fdb45 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.927462] env[65758]: DEBUG oslo_concurrency.lockutils [req-968eb091-9045-472b-a95f-36475cf2cb95 req-f56e5f9d-e6f6-4a41-8a58-c06286446633 service nova] Releasing lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.934522] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 911.934522] env[65758]: value = "task-4660701" [ 911.934522] env[65758]: _type = "Task" [ 911.934522] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.944940] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660701, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.132599] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e9ac12ce-2b10-4255-829c-40a3de968aa7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "12c27fac-98e9-486d-bf36-0580a4e0a163" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.326s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.133957] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31b7ed1-edbe-47ce-8c61-465486793067 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.145941] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Doing hard reboot of VM {{(pid=65758) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 912.146337] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-517430ac-9db4-48f1-92f0-586ba61de335 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.156215] env[65758]: DEBUG oslo_vmware.api [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 912.156215] env[65758]: value = "task-4660702" [ 912.156215] env[65758]: _type = "Task" [ 912.156215] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.169423] env[65758]: DEBUG oslo_vmware.api [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660702, 'name': ResetVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.224857] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.172s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.228835] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.284s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.229431] env[65758]: DEBUG nova.objects.instance [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lazy-loading 'resources' on Instance uuid b7e2a3d9-7db3-40b3-98a5-c6e6e040a947 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.253583] env[65758]: DEBUG oslo_concurrency.lockutils [req-38f9b376-7864-4b5e-acbb-df853c46d016 req-564b2c67-4f3b-4dd6-92e8-d015d2f49a88 service nova] Releasing lock "refresh_cache-aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.257228] env[65758]: INFO nova.scheduler.client.report [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Deleted allocations for instance f15c6953-f76b-44eb-bd1b-c0d3adddc163 [ 912.448307] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660701, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.669208] env[65758]: DEBUG oslo_vmware.api [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660702, 'name': ResetVM_Task, 'duration_secs': 0.189869} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.669208] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Did hard reboot of VM {{(pid=65758) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 912.669681] env[65758]: DEBUG nova.compute.manager [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 912.670173] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c3f5d4-0a35-41db-9e9a-4ac74250b379 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.768612] env[65758]: DEBUG oslo_concurrency.lockutils [None req-814fee69-8349-4172-a769-cff1f861b6f7 tempest-ListImageFiltersTestJSON-1542069529 tempest-ListImageFiltersTestJSON-1542069529-project-member] Lock "f15c6953-f76b-44eb-bd1b-c0d3adddc163" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.968s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.933069] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "12c27fac-98e9-486d-bf36-0580a4e0a163" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.933069] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "12c27fac-98e9-486d-bf36-0580a4e0a163" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.933286] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "12c27fac-98e9-486d-bf36-0580a4e0a163-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.933490] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "12c27fac-98e9-486d-bf36-0580a4e0a163-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.933661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 
tempest-DeleteServersTestJSON-1083168368-project-member] Lock "12c27fac-98e9-486d-bf36-0580a4e0a163-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.936338] env[65758]: INFO nova.compute.manager [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Terminating instance [ 912.952351] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660701, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599155} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.955155] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] aa2f1858-2bb2-4f12-bc05-ef6913ef36e2/aa2f1858-2bb2-4f12-bc05-ef6913ef36e2.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 912.955384] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 912.956132] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e79379d5-a56f-4500-aa4f-d7d15dcb5545 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.965406] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 912.965406] env[65758]: value = "task-4660703" [ 912.965406] env[65758]: _type = "Task" [ 912.965406] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.980423] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660703, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.154744] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9c7144-58e9-488c-a631-72921e790508 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.163762] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401ae4dd-cb20-440a-9ebf-d47d1c2689e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.197356] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd551078-15a4-4e24-ae79-e3e1b9ee7eff tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "875cbc88-f817-4ea8-a969-b97e875918d1" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.555s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.198430] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc80f59a-e5f2-4ff6-be6c-4adefbc0279d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.207452] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb13777-055f-4880-87cd-970ff806a21e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.223931] env[65758]: DEBUG nova.compute.provider_tree [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.402200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.402200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.402200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.402200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e 
tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.402200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.404038] env[65758]: INFO nova.compute.manager [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Terminating instance [ 913.446977] env[65758]: DEBUG nova.compute.manager [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 913.447115] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 913.448569] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6da805-1651-4899-8ec6-f6263eb7611f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.458407] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.458700] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85dad738-835a-4099-9dd2-60986c2e143a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.467310] env[65758]: DEBUG oslo_vmware.api [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 913.467310] env[65758]: value = "task-4660704" [ 913.467310] env[65758]: _type = "Task" [ 913.467310] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.481224] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075461} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.484858] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.485343] env[65758]: DEBUG oslo_vmware.api [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660704, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.486169] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c63e55c-d58a-453b-bed7-058c42b8f4cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.512734] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] aa2f1858-2bb2-4f12-bc05-ef6913ef36e2/aa2f1858-2bb2-4f12-bc05-ef6913ef36e2.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.513176] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0791349-baae-483c-9d0d-283f7d06a415 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.534992] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 913.534992] env[65758]: value = "task-4660705" [ 913.534992] env[65758]: _type = "Task" [ 913.534992] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.547886] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660705, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.727277] env[65758]: DEBUG nova.scheduler.client.report [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.912246] env[65758]: DEBUG nova.compute.manager [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 913.913832] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 913.915100] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c75af00-dab9-47d8-8e70-80162ea46c10 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.925689] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.926109] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be107160-4ace-474b-98c9-aa736ee8a83c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.937480] env[65758]: DEBUG oslo_vmware.api [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 913.937480] env[65758]: value = "task-4660707" [ 913.937480] env[65758]: _type = "Task" [ 913.937480] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.951813] env[65758]: DEBUG oslo_vmware.api [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660707, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.970921] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "3ff9192b-3956-49f6-afd2-827759826056" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.971980] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.984814] env[65758]: DEBUG oslo_vmware.api [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660704, 'name': PowerOffVM_Task, 'duration_secs': 0.38702} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.985666] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.986098] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.986195] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d07f3974-eb34-45af-9288-6e37b3ebc992 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.049457] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660705, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.068225] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 914.068928] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 914.068928] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleting the datastore file [datastore2] 12c27fac-98e9-486d-bf36-0580a4e0a163 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 914.069231] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7dc8201d-814e-4b94-9a19-5e1cf81d3fce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.077961] env[65758]: DEBUG oslo_vmware.api [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 914.077961] env[65758]: value = "task-4660709" [ 914.077961] env[65758]: _type = "Task" [ 914.077961] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.088449] env[65758]: DEBUG oslo_vmware.api [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660709, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.236778] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.240194] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.910s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.242382] env[65758]: INFO nova.compute.claims [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.275417] env[65758]: INFO nova.scheduler.client.report [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Deleted allocations for instance b7e2a3d9-7db3-40b3-98a5-c6e6e040a947 [ 914.351360] env[65758]: DEBUG nova.compute.manager [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Received event network-changed-d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 914.351570] env[65758]: DEBUG nova.compute.manager [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Refreshing instance network info cache due to event network-changed-d93558c3-4a75-4e02-98d6-de4d1cf9dee2. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 914.351853] env[65758]: DEBUG oslo_concurrency.lockutils [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] Acquiring lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.352025] env[65758]: DEBUG oslo_concurrency.lockutils [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] Acquired lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.352233] env[65758]: DEBUG nova.network.neutron [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Refreshing network info cache for port d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 914.449196] env[65758]: DEBUG oslo_vmware.api [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660707, 'name': PowerOffVM_Task, 'duration_secs': 0.339188} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.449485] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.449647] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 914.449917] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9a3eb44-a262-44bb-a8ce-2bc5a1135a0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.478743] env[65758]: DEBUG nova.compute.manager [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 914.524611] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 914.524829] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 914.525011] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleting the datastore file [datastore1] a9ec9a64-94c7-41a5-a7a4-5e034ddfc592 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 914.525558] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66307147-ca08-4293-9554-6f4e930ea120 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.534192] env[65758]: DEBUG oslo_vmware.api [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 914.534192] env[65758]: value = "task-4660711" [ 914.534192] env[65758]: _type = "Task" [ 914.534192] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.546919] env[65758]: DEBUG oslo_vmware.api [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660711, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.549595] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660705, 'name': ReconfigVM_Task, 'duration_secs': 0.522917} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.550128] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Reconfigured VM instance instance-0000004b to attach disk [datastore2] aa2f1858-2bb2-4f12-bc05-ef6913ef36e2/aa2f1858-2bb2-4f12-bc05-ef6913ef36e2.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.550368] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8c23051-234f-42eb-a279-830bccd5e627 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.558534] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 914.558534] env[65758]: value = "task-4660712" [ 914.558534] env[65758]: _type = "Task" [ 914.558534] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.569290] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660712, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.587820] env[65758]: DEBUG oslo_vmware.api [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210198} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.588229] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.588445] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.588621] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.588788] env[65758]: INFO nova.compute.manager [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 914.589046] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 914.589256] env[65758]: DEBUG nova.compute.manager [-] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 914.589349] env[65758]: DEBUG nova.network.neutron [-] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 914.589600] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 914.590142] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 914.590407] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 914.641522] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 914.784555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fa280f91-7901-40d7-bd37-16bcf3c07045 tempest-ServersTestManualDisk-1769110030 tempest-ServersTestManualDisk-1769110030-project-member] Lock "b7e2a3d9-7db3-40b3-98a5-c6e6e040a947" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.629s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.855799] env[65758]: WARNING neutronclient.v2_0.client [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 914.856679] env[65758]: WARNING openstack [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 914.857262] env[65758]: WARNING openstack [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 914.951940] env[65758]: DEBUG nova.compute.manager [req-7c8124e6-2f56-4abb-bbba-a0d5e3d7d54e req-3acbcce3-b0d5-42b9-a2e6-da03b5f623cc service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Received event network-vif-deleted-875a4c06-92f4-4ace-ba63-e7014651800c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 914.952236] env[65758]: INFO nova.compute.manager [req-7c8124e6-2f56-4abb-bbba-a0d5e3d7d54e req-3acbcce3-b0d5-42b9-a2e6-da03b5f623cc service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Neutron deleted interface 875a4c06-92f4-4ace-ba63-e7014651800c; detaching it from the instance and deleting it from the info cache [ 914.952432] env[65758]: DEBUG nova.network.neutron [req-7c8124e6-2f56-4abb-bbba-a0d5e3d7d54e req-3acbcce3-b0d5-42b9-a2e6-da03b5f623cc service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 915.002671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.026469] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "875cbc88-f817-4ea8-a969-b97e875918d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.026963] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "875cbc88-f817-4ea8-a969-b97e875918d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.027313] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "875cbc88-f817-4ea8-a969-b97e875918d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.027547] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "875cbc88-f817-4ea8-a969-b97e875918d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.027784] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "875cbc88-f817-4ea8-a969-b97e875918d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.031708] env[65758]: INFO nova.compute.manager [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Terminating instance [ 915.049252] env[65758]: DEBUG oslo_vmware.api [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154738} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.049557] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.049738] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.049912] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 915.050092] env[65758]: INFO nova.compute.manager [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Took 1.14 seconds to destroy the instance on the hypervisor. [ 915.050342] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 915.050549] env[65758]: DEBUG nova.compute.manager [-] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 915.050628] env[65758]: DEBUG nova.network.neutron [-] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 915.050866] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 915.051640] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 915.051907] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 915.069683] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660712, 'name': Rename_Task, 'duration_secs': 0.203679} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.069683] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.069860] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e16e6bd-1f1c-490d-aa83-8b56f1acde68 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.078971] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 915.078971] env[65758]: value = "task-4660713" [ 915.078971] env[65758]: _type = "Task" [ 915.078971] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.088937] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.214090] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 915.428899] env[65758]: DEBUG nova.network.neutron [-] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 915.455683] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75c79fac-0e7f-4c25-8316-72fbab2de8ec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.480676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d480593-e33b-44ed-8f7c-29f1d7db3cf0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.542903] env[65758]: DEBUG nova.compute.manager [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 915.543263] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.543758] env[65758]: DEBUG nova.compute.manager [req-7c8124e6-2f56-4abb-bbba-a0d5e3d7d54e req-3acbcce3-b0d5-42b9-a2e6-da03b5f623cc service nova] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Detach interface failed, port_id=875a4c06-92f4-4ace-ba63-e7014651800c, reason: Instance 12c27fac-98e9-486d-bf36-0580a4e0a163 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 915.549758] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e5e96a-d694-4c2b-b6db-bee17d6a5c7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.567738] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 915.568154] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81dbfef9-9974-409e-aa41-3d0559c7efd0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.586956] env[65758]: DEBUG oslo_vmware.api [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 915.586956] env[65758]: value = "task-4660714" [ 915.586956] env[65758]: _type = "Task" [ 915.586956] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.592099] env[65758]: WARNING neutronclient.v2_0.client [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 915.592751] env[65758]: WARNING openstack [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 915.593442] env[65758]: WARNING openstack [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 915.617828] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660713, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.626432] env[65758]: DEBUG oslo_vmware.api [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660714, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.721252] env[65758]: DEBUG nova.network.neutron [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updated VIF entry in instance network info cache for port d93558c3-4a75-4e02-98d6-de4d1cf9dee2. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 915.721594] env[65758]: DEBUG nova.network.neutron [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updating instance_info_cache with network_info: [{"id": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "address": "fa:16:3e:08:ef:77", "network": {"id": "75a22867-6396-4a02-93c3-8e7c030e0af3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1988869237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "111dc87614bb42e2bc66ae1bfb092795", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93558c3-4a", "ovs_interfaceid": "d93558c3-4a75-4e02-98d6-de4d1cf9dee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 915.852545] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83106565-8195-41b8-a54f-d77157d6a43d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.862826] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7df5f18-be60-490a-b9a7-758110f80158 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.898371] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 915.898596] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909962', 'volume_id': 'fbd302a4-8737-4848-94c0-7cfc81983fb5', 'name': 'volume-fbd302a4-8737-4848-94c0-7cfc81983fb5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f5911fb-785e-444c-9408-c6884e06c5d3', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbd302a4-8737-4848-94c0-7cfc81983fb5', 'serial': 'fbd302a4-8737-4848-94c0-7cfc81983fb5'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 915.899540] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253972fb-aff1-42a8-81c2-8085905847d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.902626] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105262d3-5658-4ecd-9eb8-1969a06323f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.921995] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb946ef-cc88-4ec8-88f2-4e5f25104605 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.926551] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e892396-25ee-4704-8176-f3d25bd5a123 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.940164] env[65758]: INFO nova.compute.manager [-] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Took 1.35 seconds to deallocate network for instance. 
[ 915.940702] env[65758]: DEBUG nova.compute.provider_tree [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.966939] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] volume-fbd302a4-8737-4848-94c0-7cfc81983fb5/volume-fbd302a4-8737-4848-94c0-7cfc81983fb5.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 915.968953] env[65758]: DEBUG nova.scheduler.client.report [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.972305] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25dc2144-bfd6-44f1-a5a0-0f6407db3eee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.994485] env[65758]: DEBUG oslo_vmware.api [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 915.994485] env[65758]: value = "task-4660715" [ 915.994485] env[65758]: _type = "Task" [ 915.994485] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.005686] env[65758]: DEBUG oslo_vmware.api [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660715, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.013581] env[65758]: DEBUG nova.network.neutron [-] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 916.090483] env[65758]: DEBUG oslo_vmware.api [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660713, 'name': PowerOnVM_Task, 'duration_secs': 0.809443} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.090651] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 916.090857] env[65758]: INFO nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Took 8.91 seconds to spawn the instance on the hypervisor. [ 916.091073] env[65758]: DEBUG nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 916.094739] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab311b5-e78c-4d1b-960a-b395ffd9f4fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.103642] env[65758]: DEBUG oslo_vmware.api [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660714, 'name': PowerOffVM_Task, 'duration_secs': 0.23179} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.105664] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 916.105899] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 916.108823] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04184d30-43d8-4c3e-b238-26304a62eb4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.190972] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 916.191254] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 916.191511] env[65758]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Deleting the datastore file [datastore1] 875cbc88-f817-4ea8-a969-b97e875918d1 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 916.191928] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc53ca76-71f0-475f-a6e6-61e3d1c11621 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.200302] env[65758]: DEBUG oslo_vmware.api [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 916.200302] env[65758]: value = "task-4660717" [ 916.200302] env[65758]: _type = "Task" [ 916.200302] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.212185] env[65758]: DEBUG oslo_vmware.api [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660717, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.224534] env[65758]: DEBUG oslo_concurrency.lockutils [req-9725f643-18ee-45a8-9ec4-251aef77e475 req-895f635b-7c6f-47d8-8a5f-8c8deefe151a service nova] Releasing lock "refresh_cache-875cbc88-f817-4ea8-a969-b97e875918d1" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.468651] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.487031] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.247s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.487721] env[65758]: DEBUG nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 916.490668] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.094s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.490932] env[65758]: DEBUG nova.objects.instance [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lazy-loading 'resources' on Instance uuid d60aaa5c-913f-4550-a4d5-ab994048da9f {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.506013] env[65758]: DEBUG oslo_vmware.api [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660715, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.516210] env[65758]: INFO nova.compute.manager [-] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Took 1.47 seconds to deallocate network for instance. [ 916.622111] env[65758]: INFO nova.compute.manager [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Took 43.33 seconds to build instance. [ 916.712190] env[65758]: DEBUG oslo_vmware.api [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157803} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.712790] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.713015] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.713722] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.713861] env[65758]: INFO nova.compute.manager [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 916.714156] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 916.714542] env[65758]: DEBUG nova.compute.manager [-] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 916.714588] env[65758]: DEBUG nova.network.neutron [-] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 916.714935] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 916.715807] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 916.720019] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 916.790214] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 916.994385] env[65758]: DEBUG nova.compute.utils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.997033] env[65758]: DEBUG nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 916.997033] env[65758]: DEBUG nova.network.neutron [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 916.997173] env[65758]: WARNING neutronclient.v2_0.client [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 916.997435] env[65758]: WARNING neutronclient.v2_0.client [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 916.998602] env[65758]: WARNING openstack [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 916.999145] env[65758]: WARNING openstack [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 917.023887] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.024106] env[65758]: DEBUG oslo_vmware.api [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660715, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.026811] env[65758]: DEBUG nova.compute.manager [req-46e92bc0-249d-40f1-b7d7-28a726ad7e84 req-8d5ef22a-a832-46a3-bdec-a21e2decadc2 service nova] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Received event network-vif-deleted-e31ffc86-5e08-405f-8129-6af1973003bf {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 917.093661] env[65758]: DEBUG nova.policy [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b15f650508f844388197b63e6fee78a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4c2ab2b80c04c38bfb4c7cafac87fe6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 917.101021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquiring lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.130447] env[65758]: DEBUG oslo_concurrency.lockutils [None req-72392180-4991-4a0c-a4b6-6871a43a5ff9 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.830s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.130447] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.027s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.130447] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquiring lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.130447] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.130447] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.132535] env[65758]: INFO nova.compute.manager [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Terminating instance [ 917.460963] env[65758]: DEBUG nova.network.neutron [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Successfully created port: 30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 917.481444] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076438ae-0b8d-44b0-b9e3-0cd7f3f390ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.490661] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb7bfd6-a385-42d4-a2ae-7838f6883092 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.527462] env[65758]: DEBUG nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 917.531883] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb6c145-665a-44f6-b53f-5703a1e36961 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.538284] env[65758]: DEBUG oslo_vmware.api [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660715, 'name': ReconfigVM_Task, 'duration_secs': 1.369901} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.540225] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Reconfigured VM instance instance-0000003f to attach disk [datastore2] volume-fbd302a4-8737-4848-94c0-7cfc81983fb5/volume-fbd302a4-8737-4848-94c0-7cfc81983fb5.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.547587] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12cb5fc9-20e1-47a4-8136-e67dd8811187 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.558926] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7f9914-3e85-4be6-918a-db4815ca164d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.575117] env[65758]: DEBUG nova.compute.provider_tree [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.578457] env[65758]: DEBUG oslo_vmware.api [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 917.578457] env[65758]: value = "task-4660718" [ 917.578457] env[65758]: _type = "Task" [ 917.578457] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.588813] env[65758]: DEBUG nova.network.neutron [-] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 917.590143] env[65758]: DEBUG oslo_vmware.api [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660718, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.640178] env[65758]: DEBUG nova.compute.manager [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 917.640178] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.641028] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f96fe8-fca3-4cc4-91d1-d072f4e72124 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.649857] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.650582] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-606d7732-7237-4560-8347-b5b1983915bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.661549] env[65758]: DEBUG oslo_vmware.api [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 917.661549] env[65758]: value = "task-4660719" [ 917.661549] env[65758]: _type = "Task" [ 917.661549] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.674374] env[65758]: DEBUG oslo_vmware.api [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660719, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.080356] env[65758]: DEBUG nova.scheduler.client.report [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.094958] env[65758]: INFO nova.compute.manager [-] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Took 1.38 seconds to deallocate network for instance. [ 918.095820] env[65758]: DEBUG oslo_vmware.api [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660718, 'name': ReconfigVM_Task, 'duration_secs': 0.160487} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.098153] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909962', 'volume_id': 'fbd302a4-8737-4848-94c0-7cfc81983fb5', 'name': 'volume-fbd302a4-8737-4848-94c0-7cfc81983fb5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f5911fb-785e-444c-9408-c6884e06c5d3', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbd302a4-8737-4848-94c0-7cfc81983fb5', 'serial': 'fbd302a4-8737-4848-94c0-7cfc81983fb5'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 918.175897] env[65758]: DEBUG oslo_vmware.api [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660719, 'name': PowerOffVM_Task, 'duration_secs': 0.230328} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.176181] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.176343] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 918.176598] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28afeef6-1108-4961-8fb5-84e5d6010e7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.260460] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 918.260718] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 918.260880] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Deleting the datastore file [datastore2] aa2f1858-2bb2-4f12-bc05-ef6913ef36e2 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 918.261269] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f114f6ab-1ec6-40c9-bd9b-e7265fbb8def {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.270277] env[65758]: DEBUG oslo_vmware.api [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for the task: (returnval){ [ 918.270277] env[65758]: value = "task-4660721" [ 918.270277] env[65758]: _type = "Task" [ 918.270277] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.282338] env[65758]: DEBUG oslo_vmware.api [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660721, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.488287] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.488690] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.543356] env[65758]: DEBUG nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 918.570321] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 918.570584] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.570740] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 918.570949] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.571121] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 918.571271] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 918.571475] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 918.571628] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 918.571788] env[65758]: DEBUG nova.virt.hardware [None 
req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 918.571939] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 918.572114] env[65758]: DEBUG nova.virt.hardware [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 918.573030] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae354661-5964-475c-8b87-751ad0b8dd60 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.581287] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035b3a36-f139-4b52-82b8-3281f559d1b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.595242] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.105s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.597572] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.037s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.599033] env[65758]: INFO nova.compute.claims [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.606666] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.614753] env[65758]: INFO nova.scheduler.client.report [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Deleted allocations for instance d60aaa5c-913f-4550-a4d5-ab994048da9f [ 918.784908] env[65758]: DEBUG oslo_vmware.api [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Task: {'id': task-4660721, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.159898} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.785449] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.785983] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.786542] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.786681] env[65758]: INFO nova.compute.manager [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Took 1.15 seconds to destroy the instance on the hypervisor. [ 918.787188] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 918.787868] env[65758]: DEBUG nova.compute.manager [-] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 918.787868] env[65758]: DEBUG nova.network.neutron [-] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 918.788177] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 918.789037] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 918.789126] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 918.829257] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 918.992160] env[65758]: DEBUG nova.compute.manager [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 919.010985] env[65758]: DEBUG nova.network.neutron [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Successfully updated port: 30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 919.125216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91426f2f-10d6-43b6-870e-b08c9f0e8a3e tempest-ServerShowV247Test-571414411 tempest-ServerShowV247Test-571414411-project-member] Lock "d60aaa5c-913f-4550-a4d5-ab994048da9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.089s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.141960] env[65758]: DEBUG nova.compute.manager [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Received event network-vif-deleted-d93558c3-4a75-4e02-98d6-de4d1cf9dee2 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 919.145077] env[65758]: DEBUG nova.compute.manager [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Received event network-vif-plugged-30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 919.145077] env[65758]: DEBUG oslo_concurrency.lockutils [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] Acquiring lock "be3de9bd-da98-4c7e-ad7c-933245523695-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.145077] env[65758]: DEBUG oslo_concurrency.lockutils [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] Lock "be3de9bd-da98-4c7e-ad7c-933245523695-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.145077] env[65758]: DEBUG oslo_concurrency.lockutils [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] Lock "be3de9bd-da98-4c7e-ad7c-933245523695-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.145077] env[65758]: DEBUG nova.compute.manager [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] No waiting events found dispatching network-vif-plugged-30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 919.145077] env[65758]: WARNING nova.compute.manager 
[req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Received unexpected event network-vif-plugged-30972d97-c096-41a5-b3bf-289b54c95d25 for instance with vm_state building and task_state spawning. [ 919.145077] env[65758]: DEBUG nova.compute.manager [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Received event network-changed-30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 919.145077] env[65758]: DEBUG nova.compute.manager [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Refreshing instance network info cache due to event network-changed-30972d97-c096-41a5-b3bf-289b54c95d25. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 919.145077] env[65758]: DEBUG oslo_concurrency.lockutils [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] Acquiring lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.145077] env[65758]: DEBUG oslo_concurrency.lockutils [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] Acquired lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.145077] env[65758]: DEBUG nova.network.neutron [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Refreshing network info cache for port 30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 919.147214] env[65758]: DEBUG nova.objects.instance [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.161115] env[65758]: DEBUG nova.compute.manager [req-935d33d8-b0d8-466b-a7f6-bdf9a4b1656e req-7f2ade75-28be-47b4-a92d-e5e747cfea0e service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Received event network-vif-deleted-76336f1a-f792-496d-bde3-e6466ebd059d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 919.161115] env[65758]: INFO nova.compute.manager [req-935d33d8-b0d8-466b-a7f6-bdf9a4b1656e req-7f2ade75-28be-47b4-a92d-e5e747cfea0e service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Neutron deleted interface 76336f1a-f792-496d-bde3-e6466ebd059d; detaching it from the instance and deleting it from the info cache [ 919.161115] env[65758]: DEBUG nova.network.neutron [req-935d33d8-b0d8-466b-a7f6-bdf9a4b1656e req-7f2ade75-28be-47b4-a92d-e5e747cfea0e service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 919.514324] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 
tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.519410] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.556496] env[65758]: DEBUG nova.network.neutron [-] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 919.650542] env[65758]: WARNING neutronclient.v2_0.client [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 919.651447] env[65758]: WARNING openstack [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 919.651812] env[65758]: WARNING openstack [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 919.665406] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26339d5b-9b30-4f1d-a0e2-c56bac952ea8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.431s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.666482] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34877fd9-aa9e-4973-a740-7582bf73af9a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.679580] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a06c0b-5c6f-4d94-9664-bb99c755f57f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.700603] env[65758]: DEBUG nova.network.neutron [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 919.717656] env[65758]: DEBUG nova.compute.manager [req-935d33d8-b0d8-466b-a7f6-bdf9a4b1656e req-7f2ade75-28be-47b4-a92d-e5e747cfea0e service nova] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Detach interface failed, port_id=76336f1a-f792-496d-bde3-e6466ebd059d, reason: Instance aa2f1858-2bb2-4f12-bc05-ef6913ef36e2 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 919.757496] env[65758]: DEBUG oslo_concurrency.lockutils [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.758358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.758358] env[65758]: DEBUG nova.compute.manager [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 919.759421] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2caa4e6f-c426-435a-972f-a3b5c06a89f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.767141] env[65758]: DEBUG nova.compute.manager [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 919.767896] env[65758]: DEBUG nova.objects.instance [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.831551] env[65758]: DEBUG nova.network.neutron [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 920.040216] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290f19c7-4003-4874-9580-15a38626568e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.049293] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-65e52469-5144-4a92-9102-788845ed0915 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.086341] env[65758]: INFO nova.compute.manager [-] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Took 1.30 seconds to deallocate network for instance. [ 920.089073] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d51c94-83e5-4772-a183-79b19ab8a86c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.102725] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3f9d4f-3826-4d21-b765-5576098700f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.120300] env[65758]: DEBUG nova.compute.provider_tree [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.337077] env[65758]: DEBUG oslo_concurrency.lockutils [req-6a9d3890-03a2-441a-b50e-8853f5f34a62 req-8138248f-7531-4138-b42c-b6abf57a0308 service nova] Releasing lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.337553] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.337751] env[65758]: DEBUG nova.network.neutron [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 920.597662] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.622839] env[65758]: DEBUG nova.scheduler.client.report [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.774944] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 920.775341] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf9e8c70-74f1-4dfd-b4d8-6a3d4bf61dee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.785710] env[65758]: DEBUG oslo_vmware.api [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 920.785710] env[65758]: value = "task-4660722" [ 920.785710] env[65758]: _type = "Task" [ 920.785710] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.796684] env[65758]: DEBUG oslo_vmware.api [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.841119] env[65758]: WARNING openstack [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 920.841700] env[65758]: WARNING openstack [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 920.898763] env[65758]: DEBUG nova.network.neutron [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 920.982733] env[65758]: WARNING neutronclient.v2_0.client [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 920.983558] env[65758]: WARNING openstack [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 920.983929] env[65758]: WARNING openstack [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 921.073058] env[65758]: DEBUG nova.network.neutron [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Updating instance_info_cache with network_info: [{"id": "30972d97-c096-41a5-b3bf-289b54c95d25", "address": "fa:16:3e:58:c9:d7", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30972d97-c0", "ovs_interfaceid": "30972d97-c096-41a5-b3bf-289b54c95d25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 921.128130] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.128686] env[65758]: DEBUG nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 921.131982] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.884s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.132235] env[65758]: DEBUG nova.objects.instance [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lazy-loading 'resources' on Instance uuid 96103549-80a5-462d-9f73-f5f6363ab9fc {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.298395] env[65758]: DEBUG oslo_vmware.api [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660722, 'name': PowerOffVM_Task, 'duration_secs': 0.227923} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.299114] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.299114] env[65758]: DEBUG nova.compute.manager [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 921.300038] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5fd1ba-9754-406b-8b7e-eb30d57e94bd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.578022] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.578022] env[65758]: DEBUG nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Instance network_info: |[{"id": "30972d97-c096-41a5-b3bf-289b54c95d25", "address": "fa:16:3e:58:c9:d7", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30972d97-c0", "ovs_interfaceid": "30972d97-c096-41a5-b3bf-289b54c95d25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 921.578022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:c9:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5efce30e-48dd-493a-a354-f562a8adf7af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30972d97-c096-41a5-b3bf-289b54c95d25', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.585997] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 921.586402] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.586744] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ccbaa74-97c6-4458-b611-b188a8a36c4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.609439] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.609439] env[65758]: value = "task-4660723" [ 921.609439] env[65758]: _type = "Task" [ 921.609439] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.619073] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660723, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.636023] env[65758]: DEBUG nova.compute.utils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 921.636023] env[65758]: DEBUG nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 921.636023] env[65758]: DEBUG nova.network.neutron [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 921.644038] env[65758]: WARNING neutronclient.v2_0.client [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 921.644038] env[65758]: WARNING neutronclient.v2_0.client [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 921.644038] env[65758]: WARNING openstack [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 921.644038] env[65758]: WARNING openstack [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 921.762407] env[65758]: DEBUG nova.policy [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '742a9f6633b54c6f8cd432ac94b59e25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e3a324879d646699f950687546ea861', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 921.814565] env[65758]: DEBUG oslo_concurrency.lockutils [None req-95140aa9-155c-49a3-9a42-bd812333719c tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.129256] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660723, 'name': CreateVM_Task, 'duration_secs': 0.377124} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.133876] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.135165] env[65758]: WARNING neutronclient.v2_0.client [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 922.137334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.137334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.137334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 922.137762] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65c18c3d-cda5-48ad-add7-c2690e858b6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.144104] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 922.144104] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524eb674-f034-02dc-cda5-aa6e181e374b" [ 922.144104] env[65758]: _type = "Task" [ 922.144104] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.155627] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524eb674-f034-02dc-cda5-aa6e181e374b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.159716] env[65758]: DEBUG nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 922.174318] env[65758]: DEBUG nova.network.neutron [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Successfully created port: a2f86d86-ad34-41b7-a00d-cd72df0fb614 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 922.204604] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e17f3e-f288-411c-88c4-4992e6d9526a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.215244] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87944474-6027-4642-a347-048bf77e08b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.250320] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7055cb7-9e4b-4df6-b747-8846a0ea9c8c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.259189] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538236e0-14a5-4d2b-96d0-6adb8beda9fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.276851] env[65758]: DEBUG nova.compute.provider_tree [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.393330] env[65758]: DEBUG nova.objects.instance [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.663294] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524eb674-f034-02dc-cda5-aa6e181e374b, 'name': SearchDatastore_Task, 'duration_secs': 0.014723} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.663671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.663895] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.664119] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.664224] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.664401] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.668251] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f76793b-2f06-4e14-80cf-8c4f244ed7d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.679829] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.680599] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.680691] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c197f025-12b8-4241-96b5-b159af33eb5b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.688522] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 922.688522] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8d354-5fff-eb65-fab6-1a58b89a7817" [ 922.688522] env[65758]: _type = "Task" [ 922.688522] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.697516] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8d354-5fff-eb65-fab6-1a58b89a7817, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.782253] env[65758]: DEBUG nova.scheduler.client.report [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.901411] env[65758]: DEBUG oslo_concurrency.lockutils [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.901598] env[65758]: DEBUG oslo_concurrency.lockutils [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.901772] env[65758]: DEBUG nova.network.neutron [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 922.901943] env[65758]: DEBUG nova.objects.instance [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'info_cache' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.176170] 
env[65758]: DEBUG nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 923.200165] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d8d354-5fff-eb65-fab6-1a58b89a7817, 'name': SearchDatastore_Task, 'duration_secs': 0.010028} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.200745] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3735043a-365e-44f1-9831-04b4e4ab42ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.213208] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 923.213208] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520d28c3-4616-8a48-3313-4a9bba34cbe0" [ 923.213208] env[65758]: _type = "Task" [ 923.213208] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.215959] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 923.216546] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.216546] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 923.216546] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 923.216723] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 923.216723] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 923.217089] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 923.217089] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 923.217289] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 923.217432] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 923.217580] env[65758]: DEBUG nova.virt.hardware [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 923.218528] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c108db-2070-4328-a19a-2277df0e9a0a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.237752] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f74f08-d12c-427a-b06c-675a569a40f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.242193] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520d28c3-4616-8a48-3313-4a9bba34cbe0, 'name': SearchDatastore_Task, 'duration_secs': 0.010732} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.242498] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.242751] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695/be3de9bd-da98-4c7e-ad7c-933245523695.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.243523] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a985a3b9-c0ce-421f-b2f1-f133d35b42eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.258145] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 923.258145] env[65758]: value = "task-4660724" [ 923.258145] env[65758]: _type = "Task" [ 923.258145] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.267595] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660724, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.285793] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.154s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.289352] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.913s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.289616] env[65758]: DEBUG nova.objects.instance [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lazy-loading 'resources' on Instance uuid 47bb5b02-4f84-468e-ad46-2c1c96b65c97 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.321374] env[65758]: INFO nova.scheduler.client.report [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted allocations for instance 96103549-80a5-462d-9f73-f5f6363ab9fc [ 923.405987] env[65758]: DEBUG nova.objects.base [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Object Instance<7f5911fb-785e-444c-9408-c6884e06c5d3> lazy-loaded attributes: flavor,info_cache {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 923.772268] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660724, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488022} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.772528] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695/be3de9bd-da98-4c7e-ad7c-933245523695.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 923.772909] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 923.773047] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c064de9c-000a-4865-bf11-a48d9b090c53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.782459] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 923.782459] env[65758]: value = "task-4660725" [ 923.782459] env[65758]: _type = "Task" [ 923.782459] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.798590] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660725, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.834048] env[65758]: DEBUG oslo_concurrency.lockutils [None req-af6d03de-5667-40c3-9d45-a534aeaa101f tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "96103549-80a5-462d-9f73-f5f6363ab9fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.554s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.910677] env[65758]: WARNING neutronclient.v2_0.client [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 923.911389] env[65758]: WARNING openstack [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 923.911759] env[65758]: WARNING openstack [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 924.180459] env[65758]: WARNING neutronclient.v2_0.client [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 924.181226] env[65758]: WARNING openstack [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 924.181972] env[65758]: WARNING openstack [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 924.239868] env[65758]: DEBUG nova.compute.manager [req-79d78d1f-3ba1-4e47-b125-340c46e95391 req-cda3c102-474b-49dd-9fd6-99e5b487346a service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Received event network-vif-plugged-a2f86d86-ad34-41b7-a00d-cd72df0fb614 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 924.240471] env[65758]: DEBUG oslo_concurrency.lockutils [req-79d78d1f-3ba1-4e47-b125-340c46e95391 req-cda3c102-474b-49dd-9fd6-99e5b487346a service nova] Acquiring lock "1ff48e58-9240-466d-bec4-51394e550c34-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.240853] env[65758]: DEBUG oslo_concurrency.lockutils [req-79d78d1f-3ba1-4e47-b125-340c46e95391 req-cda3c102-474b-49dd-9fd6-99e5b487346a service nova] Lock "1ff48e58-9240-466d-bec4-51394e550c34-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.240853] env[65758]: DEBUG oslo_concurrency.lockutils [req-79d78d1f-3ba1-4e47-b125-340c46e95391 req-cda3c102-474b-49dd-9fd6-99e5b487346a service nova] Lock 
"1ff48e58-9240-466d-bec4-51394e550c34-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.241042] env[65758]: DEBUG nova.compute.manager [req-79d78d1f-3ba1-4e47-b125-340c46e95391 req-cda3c102-474b-49dd-9fd6-99e5b487346a service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] No waiting events found dispatching network-vif-plugged-a2f86d86-ad34-41b7-a00d-cd72df0fb614 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 924.241217] env[65758]: WARNING nova.compute.manager [req-79d78d1f-3ba1-4e47-b125-340c46e95391 req-cda3c102-474b-49dd-9fd6-99e5b487346a service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Received unexpected event network-vif-plugged-a2f86d86-ad34-41b7-a00d-cd72df0fb614 for instance with vm_state building and task_state spawning. [ 924.275526] env[65758]: DEBUG nova.network.neutron [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Successfully updated port: a2f86d86-ad34-41b7-a00d-cd72df0fb614 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 924.279711] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6d8571-6932-48cb-88ab-e163f1fba66f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.290719] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f5b568-487f-43ba-9c55-b0fd2f4b5f70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.298091] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660725, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068998} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.298820] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.299936] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb3f31a-3226-40af-a3df-f6906f4af3be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.333451] env[65758]: DEBUG nova.network.neutron [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updating instance_info_cache with network_info: [{"id": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "address": "fa:16:3e:f0:a2:8e", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb28e5b-cb", "ovs_interfaceid": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 924.335827] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d798ee1d-9a35-4ab8-b60d-e55963017dac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.361284] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695/be3de9bd-da98-4c7e-ad7c-933245523695.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.363662] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9643c428-0b00-47f6-a169-9125c7b56550 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.384128] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 
tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "63b744d2-541a-42e3-9717-b06a4459fd50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.384729] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.387301] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98909c22-43c1-47b3-8085-50a32e35baea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.394037] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 924.394037] env[65758]: value = "task-4660726" [ 924.394037] env[65758]: _type = "Task" [ 924.394037] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.407035] env[65758]: DEBUG nova.compute.provider_tree [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.416520] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660726, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.784533] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "refresh_cache-1ff48e58-9240-466d-bec4-51394e550c34" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.784533] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "refresh_cache-1ff48e58-9240-466d-bec4-51394e550c34" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.784533] env[65758]: DEBUG nova.network.neutron [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 924.852511] env[65758]: DEBUG oslo_concurrency.lockutils [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Releasing lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.893221] env[65758]: DEBUG nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 924.905351] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660726, 'name': ReconfigVM_Task, 'duration_secs': 0.355309} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.905675] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Reconfigured VM instance instance-0000004a to attach disk [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695/be3de9bd-da98-4c7e-ad7c-933245523695.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 924.906365] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34099c38-ceb5-4131-88e3-1ead77e3cd1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.918758] env[65758]: DEBUG nova.scheduler.client.report [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.919425] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 924.919425] env[65758]: value = "task-4660727" [ 924.919425] env[65758]: _type = "Task" [ 924.919425] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.930464] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660727, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.291234] env[65758]: WARNING openstack [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.291699] env[65758]: WARNING openstack [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 925.327326] env[65758]: DEBUG nova.network.neutron [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 925.407119] env[65758]: WARNING neutronclient.v2_0.client [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 925.408114] env[65758]: WARNING openstack [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 925.408437] env[65758]: WARNING openstack [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 925.421545] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.132s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.424810] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.425059] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a 
tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.652s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.425289] env[65758]: DEBUG nova.objects.instance [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lazy-loading 'resources' on Instance uuid 79c63944-c4c8-4c7c-bc42-3f958d737e66 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.438097] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660727, 'name': Rename_Task, 'duration_secs': 0.152505} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.438394] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 925.438690] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce265a93-604a-4ffa-a459-5bda1786d80c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.443610] env[65758]: INFO nova.scheduler.client.report [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Deleted allocations for instance 47bb5b02-4f84-468e-ad46-2c1c96b65c97 [ 925.448901] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 925.448901] env[65758]: value = "task-4660728" [ 925.448901] env[65758]: _type = "Task" [ 925.448901] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.459159] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660728, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.513973] env[65758]: DEBUG nova.network.neutron [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Updating instance_info_cache with network_info: [{"id": "a2f86d86-ad34-41b7-a00d-cd72df0fb614", "address": "fa:16:3e:9a:0b:12", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f86d86-ad", "ovs_interfaceid": "a2f86d86-ad34-41b7-a00d-cd72df0fb614", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 925.859246] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 925.859769] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd93a729-05e2-4e85-84bb-81251feba7fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.869909] env[65758]: DEBUG oslo_vmware.api [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 925.869909] env[65758]: value = "task-4660729" [ 925.869909] env[65758]: _type = "Task" [ 925.869909] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.881361] env[65758]: DEBUG oslo_vmware.api [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660729, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.959781] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2c94ede0-8070-4b4a-9b49-69d71a6398b4 tempest-ServersTestJSON-567428874 tempest-ServersTestJSON-567428874-project-member] Lock "47bb5b02-4f84-468e-ad46-2c1c96b65c97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.342s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.968178] env[65758]: DEBUG oslo_vmware.api [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660728, 'name': PowerOnVM_Task, 'duration_secs': 0.483935} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.968356] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 925.968573] env[65758]: INFO nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Took 7.43 seconds to spawn the instance on the hypervisor. [ 925.968703] env[65758]: DEBUG nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 925.969617] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b54a60-749f-4494-a081-4e323a581ec6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.018024] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "refresh_cache-1ff48e58-9240-466d-bec4-51394e550c34" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.018024] env[65758]: DEBUG nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Instance network_info: |[{"id": "a2f86d86-ad34-41b7-a00d-cd72df0fb614", "address": "fa:16:3e:9a:0b:12", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f86d86-ad", "ovs_interfaceid": "a2f86d86-ad34-41b7-a00d-cd72df0fb614", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 926.018462] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:0b:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2f86d86-ad34-41b7-a00d-cd72df0fb614', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.028276] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 926.032434] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.033095] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b670e8d0-8975-4ad1-9876-01b4dcfa5210 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.062205] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.062205] env[65758]: value = "task-4660730" [ 926.062205] env[65758]: _type = "Task" [ 926.062205] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.073624] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660730, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.343342] env[65758]: DEBUG nova.compute.manager [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Received event network-changed-a2f86d86-ad34-41b7-a00d-cd72df0fb614 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 926.343561] env[65758]: DEBUG nova.compute.manager [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Refreshing instance network info cache due to event network-changed-a2f86d86-ad34-41b7-a00d-cd72df0fb614. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 926.344535] env[65758]: DEBUG oslo_concurrency.lockutils [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] Acquiring lock "refresh_cache-1ff48e58-9240-466d-bec4-51394e550c34" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.344702] env[65758]: DEBUG oslo_concurrency.lockutils [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] Acquired lock "refresh_cache-1ff48e58-9240-466d-bec4-51394e550c34" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.344869] env[65758]: DEBUG nova.network.neutron [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Refreshing network info cache for port a2f86d86-ad34-41b7-a00d-cd72df0fb614 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 926.382187] env[65758]: DEBUG oslo_vmware.api [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660729, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.402163] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd2b248-7514-4712-8882-f99ea4b7e811 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.409623] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a642e948-75c3-42a7-990a-fb66f788bd60 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.445977] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3433dfb0-b495-433c-a960-73652e652cbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.456097] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a4ccf8-f1cc-4a45-976a-e2a4ff1a63c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.473476] env[65758]: DEBUG nova.compute.provider_tree [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.493484] env[65758]: INFO nova.compute.manager [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Took 49.18 seconds to build instance. [ 926.574694] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660730, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.851031] env[65758]: WARNING neutronclient.v2_0.client [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 926.851185] env[65758]: WARNING openstack [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 926.851581] env[65758]: WARNING openstack [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 926.882782] env[65758]: DEBUG oslo_vmware.api [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660729, 'name': PowerOnVM_Task, 'duration_secs': 0.659225} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.884062] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 926.884062] env[65758]: DEBUG nova.compute.manager [None req-deccbefc-3012-4f25-b60a-964ecc4fc7df tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 926.884188] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31722733-b4db-4878-a3d9-7d12b91ee9e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.977591] env[65758]: DEBUG nova.scheduler.client.report [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.994250] env[65758]: DEBUG oslo_concurrency.lockutils [None req-442530d0-e7c8-4902-a43c-5c9955361575 tempest-ServerActionsTestJSON-1163233672 
tempest-ServerActionsTestJSON-1163233672-project-member] Lock "be3de9bd-da98-4c7e-ad7c-933245523695" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.696s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.074140] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660730, 'name': CreateVM_Task, 'duration_secs': 0.586621} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.074140] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.074669] env[65758]: WARNING neutronclient.v2_0.client [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 927.075039] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.078031] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.078031] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 927.078031] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-887dc580-b441-43ae-9522-02a2a715b594 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.082037] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 927.082037] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52daca14-92e0-8053-b90b-5c2042aaebdb" [ 927.082037] env[65758]: _type = "Task" [ 927.082037] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.091728] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52daca14-92e0-8053-b90b-5c2042aaebdb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.486174] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.492904] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.390s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.493407] env[65758]: DEBUG nova.objects.instance [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lazy-loading 'resources' on Instance uuid a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.528695] env[65758]: INFO nova.scheduler.client.report [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Deleted allocations for instance 79c63944-c4c8-4c7c-bc42-3f958d737e66 [ 927.566079] env[65758]: WARNING neutronclient.v2_0.client [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 927.566592] env[65758]: WARNING openstack [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 927.566946] env[65758]: WARNING openstack [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 927.594725] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52daca14-92e0-8053-b90b-5c2042aaebdb, 'name': SearchDatastore_Task, 'duration_secs': 0.011441} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.595149] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.595308] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 927.605589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.605589] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.605589] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.605589] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38b25ecf-bb25-43c7-990c-90d3683d756a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.607836] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.608052] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 927.609672] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cedd9b7a-2c33-4eb4-84b3-2473830a7337 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.616345] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 927.616345] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525fa48a-30b1-cbaa-fcc6-05843ecee4a3" [ 927.616345] env[65758]: _type = "Task" [ 927.616345] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.629492] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525fa48a-30b1-cbaa-fcc6-05843ecee4a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.669817] env[65758]: DEBUG nova.compute.manager [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Received event network-changed-30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 927.670707] env[65758]: DEBUG nova.compute.manager [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Refreshing instance network info cache due to event network-changed-30972d97-c096-41a5-b3bf-289b54c95d25. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 927.670707] env[65758]: DEBUG oslo_concurrency.lockutils [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] Acquiring lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.670707] env[65758]: DEBUG oslo_concurrency.lockutils [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] Acquired lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.672964] env[65758]: DEBUG nova.network.neutron [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Refreshing network info cache for port 30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 927.709606] env[65758]: DEBUG nova.network.neutron [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Updated VIF entry in instance network info cache for port a2f86d86-ad34-41b7-a00d-cd72df0fb614. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 927.709983] env[65758]: DEBUG nova.network.neutron [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Updating instance_info_cache with network_info: [{"id": "a2f86d86-ad34-41b7-a00d-cd72df0fb614", "address": "fa:16:3e:9a:0b:12", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f86d86-ad", "ovs_interfaceid": "a2f86d86-ad34-41b7-a00d-cd72df0fb614", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 928.038965] env[65758]: DEBUG oslo_concurrency.lockutils [None req-497094d9-263e-48ee-9712-a806aa5a042a tempest-ServersNegativeTestMultiTenantJSON-469399064 tempest-ServersNegativeTestMultiTenantJSON-469399064-project-member] Lock "79c63944-c4c8-4c7c-bc42-3f958d737e66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.923s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.130327] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525fa48a-30b1-cbaa-fcc6-05843ecee4a3, 'name': SearchDatastore_Task, 'duration_secs': 0.010709} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.131606] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4502b615-73d0-4f1b-8af8-1edd73d6b845 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.140834] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 928.140834] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f3c610-c895-97fe-a85c-033ef62e3743" [ 928.140834] env[65758]: _type = "Task" [ 928.140834] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.151185] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f3c610-c895-97fe-a85c-033ef62e3743, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.174598] env[65758]: WARNING neutronclient.v2_0.client [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 928.177607] env[65758]: WARNING openstack [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 928.178185] env[65758]: WARNING openstack [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 928.216678] env[65758]: DEBUG oslo_concurrency.lockutils [req-c24feda9-fa39-4b63-99f4-bc39125add70 req-7707cd87-024d-47b9-8c7b-16a60549ca8a service nova] Releasing lock "refresh_cache-1ff48e58-9240-466d-bec4-51394e550c34" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.424049] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f7dcf8-6c6b-4fe2-988b-d9e3d764ed0e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.432820] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfe6572-d291-4b07-9e6d-acb06bb1972e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.467174] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366024b2-cf95-4043-9b74-81f60b90295b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.477148] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0393890b-e70e-43a4-840c-a29a6555ad96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.492776] env[65758]: DEBUG nova.compute.provider_tree [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.550180] env[65758]: WARNING neutronclient.v2_0.client 
[req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 928.550522] env[65758]: WARNING openstack [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 928.551879] env[65758]: WARNING openstack [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 928.654174] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f3c610-c895-97fe-a85c-033ef62e3743, 'name': SearchDatastore_Task, 'duration_secs': 0.011078} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.654998] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.654998] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 1ff48e58-9240-466d-bec4-51394e550c34/1ff48e58-9240-466d-bec4-51394e550c34.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 928.655162] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b20b6301-b373-4918-befd-3f2e142ed711 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.659764] env[65758]: DEBUG nova.network.neutron [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Updated VIF entry in instance network info cache for port 30972d97-c096-41a5-b3bf-289b54c95d25. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 928.660122] env[65758]: DEBUG nova.network.neutron [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Updating instance_info_cache with network_info: [{"id": "30972d97-c096-41a5-b3bf-289b54c95d25", "address": "fa:16:3e:58:c9:d7", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30972d97-c0", "ovs_interfaceid": "30972d97-c096-41a5-b3bf-289b54c95d25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 928.664387] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 928.664387] env[65758]: value = "task-4660731" [ 928.664387] env[65758]: _type = "Task" [ 928.664387] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.675174] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660731, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.997676] env[65758]: DEBUG nova.scheduler.client.report [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.163980] env[65758]: DEBUG oslo_concurrency.lockutils [req-5ea0514a-038f-4444-9a68-e1fa1035efa3 req-e9bd8994-8ecf-4095-9643-9ba1e0aa3b18 service nova] Releasing lock "refresh_cache-be3de9bd-da98-4c7e-ad7c-933245523695" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.179424] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660731, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.506063] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.506900] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.739s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.507732] env[65758]: DEBUG nova.objects.instance [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lazy-loading 'resources' on Instance uuid 31816c0c-d7d2-48db-9a87-a1e03c938a60 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.536666] env[65758]: INFO nova.scheduler.client.report [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleted allocations for instance a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a [ 929.678077] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660731, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532791} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.678077] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 1ff48e58-9240-466d-bec4-51394e550c34/1ff48e58-9240-466d-bec4-51394e550c34.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 929.678077] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 929.678485] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e5fc6a4-ca93-4cea-93e3-bbf9c39429e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.687580] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 929.687580] env[65758]: value = "task-4660732" [ 929.687580] env[65758]: _type = "Task" [ 929.687580] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.698542] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660732, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.046571] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0fe1161b-e587-4bef-ac61-3510604c2559 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.474s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.199159] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660732, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145322} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.201733] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.202780] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0061dcf3-fe4e-494e-b263-7376b729c6f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.229237] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 1ff48e58-9240-466d-bec4-51394e550c34/1ff48e58-9240-466d-bec4-51394e550c34.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.232411] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aee92564-9124-43df-b819-ea841a64e5d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.260689] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 930.260689] env[65758]: value = "task-4660733" [ 930.260689] env[65758]: _type = "Task" [ 930.260689] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.275657] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660733, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.450538] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8a9706-7eec-46eb-8cea-3608c849599a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.463639] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a15f27-d779-4440-a7d6-43b4c9d141d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.508162] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5289b6d4-1f32-425c-ad6d-98480223df4d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.519292] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230c77c8-188f-4ca7-b552-b057948f5771 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.539717] env[65758]: DEBUG nova.compute.provider_tree [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.773799] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660733, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.044856] env[65758]: DEBUG nova.scheduler.client.report [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 931.273325] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660733, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.553254] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.046s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.555697] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.038s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.555937] env[65758]: DEBUG nova.objects.instance [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lazy-loading 'resources' on Instance uuid d42d0818-1486-4696-9871-2cf989aeb885 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.593538] env[65758]: INFO nova.scheduler.client.report [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Deleted allocations for instance 31816c0c-d7d2-48db-9a87-a1e03c938a60 [ 931.775322] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660733, 'name': ReconfigVM_Task, 'duration_secs': 1.183239} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.775862] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 1ff48e58-9240-466d-bec4-51394e550c34/1ff48e58-9240-466d-bec4-51394e550c34.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 931.779419] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a22c160b-fb40-4f17-a435-268353ac7737 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.790580] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 931.790580] env[65758]: value = "task-4660734" [ 931.790580] env[65758]: _type = "Task" [ 931.790580] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.800617] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660734, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.102291] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dd7d8532-9892-488c-9dd0-9782a77b94e4 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "31816c0c-d7d2-48db-9a87-a1e03c938a60" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.918s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.234103] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "33098961-060f-4503-a805-6ae7351b45ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.235093] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "33098961-060f-4503-a805-6ae7351b45ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.304185] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660734, 'name': Rename_Task, 'duration_secs': 0.161608} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.308381] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.308611] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f3bde07-5607-445f-8dfd-db9709884f69 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.318340] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 932.318340] env[65758]: value = "task-4660735" [ 932.318340] env[65758]: _type = "Task" [ 932.318340] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.327878] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660735, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.463303] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "454bd092-f683-4a3a-91c9-65191d6996f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.463543] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "454bd092-f683-4a3a-91c9-65191d6996f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.463791] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "454bd092-f683-4a3a-91c9-65191d6996f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.463914] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "454bd092-f683-4a3a-91c9-65191d6996f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.464118] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "454bd092-f683-4a3a-91c9-65191d6996f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.466623] env[65758]: INFO nova.compute.manager [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Terminating instance [ 932.490433] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d3e90d-7083-4e6d-a2ee-eadcb670a17e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.500633] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b76c62-f806-4ac1-9543-d98c6fa1ebcc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.537084] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812a2832-0dac-4096-8bb9-339298cc5aa7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.546031] env[65758]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff903850-3451-4da4-a677-0dc16be6a076 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.561071] env[65758]: DEBUG nova.compute.provider_tree [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.742220] env[65758]: DEBUG nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 932.829359] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660735, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.972160] env[65758]: DEBUG nova.compute.manager [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 932.972160] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.972683] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43b8d6d-c37c-43fa-9e37-99e3f5db7376 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.981831] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.982020] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd31f2bf-939f-4154-ad74-36879cbe2cd0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.990255] env[65758]: DEBUG oslo_vmware.api [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 932.990255] env[65758]: value = "task-4660736" [ 932.990255] env[65758]: _type = "Task" [ 932.990255] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.999904] env[65758]: DEBUG oslo_vmware.api [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660736, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.065076] env[65758]: DEBUG nova.scheduler.client.report [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 933.266351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.329508] env[65758]: DEBUG oslo_vmware.api [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660735, 'name': PowerOnVM_Task, 'duration_secs': 0.807627} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.331096] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.331096] env[65758]: INFO nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Took 10.16 seconds to spawn the instance on the hypervisor. 
[ 933.331096] env[65758]: DEBUG nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 933.331096] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54836395-9839-46bd-adfa-4e32218e7a4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.507678] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.507678] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.512596] env[65758]: DEBUG oslo_vmware.api [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660736, 'name': PowerOffVM_Task, 'duration_secs': 0.498841} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.513114] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.513114] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 933.513309] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe19a4cd-e867-48fb-952e-8946ef170a66 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.570977] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.014s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.572764] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.484s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.572965] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.574820] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.187s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.575052] env[65758]: DEBUG nova.objects.instance [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lazy-loading 'resources' on Instance uuid 2d787237-26e5-4519-9f6e-1d30b9d016cf {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 933.589172] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 933.589172] env[65758]: DEBUG 
nova.virt.vmwareapi.vmops [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 933.589172] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Deleting the datastore file [datastore2] 454bd092-f683-4a3a-91c9-65191d6996f4 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.589436] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37fcf053-8f5b-46bf-8a83-41223b5a9f7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.601451] env[65758]: DEBUG oslo_vmware.api [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 933.601451] env[65758]: value = "task-4660738" [ 933.601451] env[65758]: _type = "Task" [ 933.601451] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.610731] env[65758]: DEBUG oslo_vmware.api [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660738, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.612619] env[65758]: INFO nova.scheduler.client.report [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Deleted allocations for instance d42d0818-1486-4696-9871-2cf989aeb885 [ 933.618482] env[65758]: INFO nova.scheduler.client.report [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleted allocations for instance 105c53ce-e657-4a29-bc7f-96b4f885707a [ 933.851247] env[65758]: INFO nova.compute.manager [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Took 43.32 seconds to build instance. [ 934.015264] env[65758]: DEBUG nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 934.109881] env[65758]: DEBUG oslo_vmware.api [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15308} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.110256] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 934.110641] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 934.110641] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 934.110753] env[65758]: INFO nova.compute.manager [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 934.111322] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 934.111322] env[65758]: DEBUG nova.compute.manager [-] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 934.111412] env[65758]: DEBUG nova.network.neutron [-] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 934.111780] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 934.112648] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 934.112797] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 934.128105] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21a7cc4a-d802-4726-b865-710177914d09 tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "105c53ce-e657-4a29-bc7f-96b4f885707a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.565s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.129153] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e93ec421-2944-4512-ae19-24167dd609be tempest-ServersTestBootFromVolume-151696442 tempest-ServersTestBootFromVolume-151696442-project-member] Lock "d42d0818-1486-4696-9871-2cf989aeb885" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.162s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.154787] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 934.354610] env[65758]: DEBUG oslo_concurrency.lockutils [None req-034993e0-c8d3-4bb9-9153-704e6c3b6a22 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "1ff48e58-9240-466d-bec4-51394e550c34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.830s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.464315] env[65758]: DEBUG nova.compute.manager [req-666df26e-dd78-4853-af6a-3730fafabd39 req-0c6d5fed-1efa-4f76-8257-773e5cab989b service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Received event network-vif-deleted-891c589c-0854-41a8-8eb2-e06ac9124837 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 934.464315] env[65758]: INFO nova.compute.manager [req-666df26e-dd78-4853-af6a-3730fafabd39 req-0c6d5fed-1efa-4f76-8257-773e5cab989b service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Neutron deleted interface 891c589c-0854-41a8-8eb2-e06ac9124837; detaching it from the instance and deleting it from the info cache [ 934.464315] env[65758]: DEBUG nova.network.neutron [req-666df26e-dd78-4853-af6a-3730fafabd39 req-0c6d5fed-1efa-4f76-8257-773e5cab989b service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 934.509387] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbeed55-1f12-444c-8095-b74fc49f081a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.526659] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0bab2e-95f6-434e-943b-a7be1d036ded {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.567837] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.568641] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024a0d66-8705-40c8-a162-2bc0145c7d0e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.579374] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ddfbd4d-438f-4e88-9d9f-54c94e91c548 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.596543] env[65758]: DEBUG nova.compute.provider_tree [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.791244] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 
tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.791541] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.791748] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.791927] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.792115] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.794527] env[65758]: INFO nova.compute.manager [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Terminating instance [ 934.901105] env[65758]: DEBUG nova.network.neutron [-] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 934.966030] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-833e7d08-8dd5-4c25-9f35-6640e6465417 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.975356] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9781734-e2cd-4a69-b706-56c5ea1916e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.008975] env[65758]: DEBUG nova.compute.manager [req-666df26e-dd78-4853-af6a-3730fafabd39 req-0c6d5fed-1efa-4f76-8257-773e5cab989b service nova] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Detach interface failed, port_id=891c589c-0854-41a8-8eb2-e06ac9124837, reason: Instance 454bd092-f683-4a3a-91c9-65191d6996f4 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 935.025240] env[65758]: DEBUG nova.compute.manager [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 935.026225] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c35ce1-5428-4c75-a6c8-c6147f835117 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.099566] env[65758]: DEBUG nova.scheduler.client.report [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.301483] env[65758]: DEBUG nova.compute.manager [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 935.301712] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.302650] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16974094-8f3d-4175-945c-3b0a4f0d9271 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.311882] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.312106] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6473edbf-4fc6-46eb-888e-19608758c7fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.319485] env[65758]: DEBUG oslo_vmware.api [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 935.319485] env[65758]: value = "task-4660739" [ 935.319485] env[65758]: _type = "Task" [ 935.319485] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.329235] env[65758]: DEBUG oslo_vmware.api [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660739, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.405015] env[65758]: INFO nova.compute.manager [-] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Took 1.29 seconds to deallocate network for instance. [ 935.538265] env[65758]: INFO nova.compute.manager [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] instance snapshotting [ 935.541839] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c0a074-ae56-4d7f-82db-b97e74c3a6fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.568915] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4f06f4-465c-46da-926b-3fb627232b0c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.606575] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.032s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.609945] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.198s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.634666] env[65758]: INFO nova.scheduler.client.report [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Deleted allocations for instance 2d787237-26e5-4519-9f6e-1d30b9d016cf [ 935.836620] env[65758]: DEBUG oslo_vmware.api [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660739, 'name': PowerOffVM_Task, 'duration_secs': 0.317766} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.836963] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 935.837181] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 935.837488] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3075e21c-fc0a-4e22-b00a-e24955098741 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.913979] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.916832] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.917108] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.917269] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleting the datastore file [datastore1] 83fa942b-a195-4bcb-9ed5-5bb6764220a4 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.917576] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4b23465-e96c-4f01-b3ab-859f5f9fa07a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.925789] env[65758]: DEBUG oslo_vmware.api [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for the task: (returnval){ [ 935.925789] env[65758]: value = "task-4660741" [ 935.925789] env[65758]: _type = "Task" [ 935.925789] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.935047] env[65758]: DEBUG oslo_vmware.api [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660741, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.080212] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 936.080915] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c3ca0ce5-f28b-4486-83ae-f4ae0f6cbf78 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.088227] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 936.088227] env[65758]: value = "task-4660742" [ 936.088227] env[65758]: _type = "Task" [ 936.088227] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.098862] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660742, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.149113] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5addc9e-406f-407b-bf27-de0ffacda5c0 tempest-ServersAdminTestJSON-929686250 tempest-ServersAdminTestJSON-929686250-project-member] Lock "2d787237-26e5-4519-9f6e-1d30b9d016cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.274s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.438843] env[65758]: DEBUG oslo_vmware.api [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Task: {'id': task-4660741, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151039} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.438843] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.438843] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.439279] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.439279] env[65758]: INFO nova.compute.manager [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 936.439381] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 936.439636] env[65758]: DEBUG nova.compute.manager [-] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 936.439689] env[65758]: DEBUG nova.network.neutron [-] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 936.439912] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 936.440485] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 936.440894] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 936.516608] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 936.599090] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660742, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.652045] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 37aadd44-79e8-4479-862f-265549c9d802 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.652213] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 83fa942b-a195-4bcb-9ed5-5bb6764220a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.652329] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ec1e2845-e73a-40ff-9b6c-1d8281859fba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.652436] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 974d06c1-2704-4a78-bbd7-f54335c4288e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.652541] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e6159a35-f073-4931-b0b0-832a88680356 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.652644] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.652747] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.652847] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 7f5911fb-785e-444c-9408-c6884e06c5d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.652946] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.653141] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 454bd092-f683-4a3a-91c9-65191d6996f4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 936.653295] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a9ec9a64-94c7-41a5-a7a4-5e034ddfc592 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 936.653411] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance df46c28d-7cbd-490e-8db2-9730e4d9f953 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.653522] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e93528eb-33d0-46d1-94e8-d1d66f2c682f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.653591] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ba3153f2-8e6f-469c-8730-957c5eebe97b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.653707] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 875cbc88-f817-4ea8-a969-b97e875918d1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 936.653818] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 12c27fac-98e9-486d-bf36-0580a4e0a163 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 936.653925] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance be3de9bd-da98-4c7e-ad7c-933245523695 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 936.654670] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance aa2f1858-2bb2-4f12-bc05-ef6913ef36e2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 936.655210] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 1ff48e58-9240-466d-bec4-51394e550c34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 937.014147] env[65758]: DEBUG nova.compute.manager [req-d2752f4e-5773-452e-b704-3a3eb56f53de req-9261d55f-7a6f-47ef-8d2f-2e1d184ce2f6 service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Received event network-vif-deleted-4741e651-cd1e-4ea0-b378-213efedb59d4 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 937.014606] env[65758]: INFO nova.compute.manager [req-d2752f4e-5773-452e-b704-3a3eb56f53de req-9261d55f-7a6f-47ef-8d2f-2e1d184ce2f6 service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Neutron deleted interface 4741e651-cd1e-4ea0-b378-213efedb59d4; detaching it from the instance and deleting it from the info cache [ 937.014606] env[65758]: DEBUG nova.network.neutron [req-d2752f4e-5773-452e-b704-3a3eb56f53de req-9261d55f-7a6f-47ef-8d2f-2e1d184ce2f6 service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 937.101873] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660742, 'name': CreateSnapshot_Task, 'duration_secs': 0.908087} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.102424] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 937.103397] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2019b7a8-4e77-4e08-94bc-9752820d70f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.158858] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 3ff9192b-3956-49f6-afd2-827759826056 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 937.307578] env[65758]: DEBUG nova.network.neutron [-] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 937.520900] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72a4b6e7-e6b2-4f1d-b5a9-aff0e5ac37d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.531380] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b5b941-2240-4f60-8258-380947d427c5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.566884] env[65758]: DEBUG nova.compute.manager [req-d2752f4e-5773-452e-b704-3a3eb56f53de req-9261d55f-7a6f-47ef-8d2f-2e1d184ce2f6 service nova] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Detach interface failed, port_id=4741e651-cd1e-4ea0-b378-213efedb59d4, reason: Instance 83fa942b-a195-4bcb-9ed5-5bb6764220a4 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 937.624751] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 937.625671] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-11343ab3-0cb0-428c-a915-f171fe8fc355 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.635933] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 937.635933] env[65758]: value = "task-4660743" [ 937.635933] env[65758]: _type = "Task" [ 937.635933] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.648180] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660743, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.662142] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance d5d27a5c-afe4-49a1-a385-0a8f625b5a1e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 937.812939] env[65758]: INFO nova.compute.manager [-] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Took 1.37 seconds to deallocate network for instance. [ 938.148078] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660743, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.167283] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 63b744d2-541a-42e3-9717-b06a4459fd50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 938.227731] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquiring lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.227958] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.320064] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.647888] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660743, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.672615] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 33098961-060f-4503-a805-6ae7351b45ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 938.733385] env[65758]: DEBUG nova.compute.manager [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 939.148412] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660743, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.177518] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 76ec31e6-65c2-4290-9ec0-b274be95baa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 939.177824] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 939.177988] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3264MB phys_disk=100GB used_disk=15GB total_vcpus=48 used_vcpus=14 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '14', 'num_vm_active': '13', 'num_task_deleting': '1', 'num_os_type_None': '14', 'num_proj_cdaabf2897064b5a948dbdb6d5921d76': '1', 'io_workload': '0', 'num_task_None': '12', 'num_proj_e114eef3998848699a9a086fee86db29': '2', 'num_proj_693b129cd84f4eee9971e7221e92c3e0': '3', 'num_vm_rescued': '1', 'num_proj_c4c2ab2b80c04c38bfb4c7cafac87fe6': '2', 'num_proj_111dc87614bb42e2bc66ae1bfb092795': '1', 'num_proj_16188c7bd36d4b0eaffdc980b71ac727': '1', 'num_proj_5bed522365ca465f90708212bdb65510': '1', 'num_proj_45aad313d10447e9ba61ed0a05b915ba': '1', 'num_proj_64ffccae76ed401582dd915ae5f87922': '1', 'num_task_image_pending_upload': '1', 'num_proj_3e3a324879d646699f950687546ea861': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 939.259841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.587546] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c670391-01b5-4a3f-881a-726dc1408662 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.601217] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165b13a5-89a2-4efc-8691-b5509bca0024 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.639111] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90fce30-3f59-4fa2-a3b4-c526afc7c98b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.653166] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1721e0-2892-4db5-944a-1a8a51b99ba4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.657340] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660743, 'name': CloneVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.667739] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.151220] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660743, 'name': CloneVM_Task, 'duration_secs': 2.026994} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.151581] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Created linked-clone VM from snapshot [ 940.152456] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0e2979-7d03-444c-b646-fa846b8b3331 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.163383] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Uploading image fe1141ac-cd89-43cf-a723-116931d6815e {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 940.171208] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.193204] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 940.193204] env[65758]: value = "vm-909966" [ 940.193204] env[65758]: _type = "VirtualMachine" [ 940.193204] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 940.194247] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-51bfdad8-3e47-4d09-979c-83c6476d3001 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.204925] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lease: (returnval){ [ 940.204925] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528a3260-8fe1-1b12-0362-741ccf2d241d" [ 940.204925] env[65758]: _type = "HttpNfcLease" [ 940.204925] env[65758]: } obtained for exporting VM: (result){ [ 940.204925] env[65758]: value = "vm-909966" [ 940.204925] env[65758]: _type = "VirtualMachine" [ 940.204925] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 940.205221] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the lease: (returnval){ [ 940.205221] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528a3260-8fe1-1b12-0362-741ccf2d241d" [ 940.205221] env[65758]: _type = "HttpNfcLease" [ 940.205221] env[65758]: } to be ready. 
{{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 940.214362] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 940.214362] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528a3260-8fe1-1b12-0362-741ccf2d241d" [ 940.214362] env[65758]: _type = "HttpNfcLease" [ 940.214362] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 940.676366] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 940.676625] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.067s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.676902] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.674s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.678454] env[65758]: INFO nova.compute.claims [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.715260] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 940.715260] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528a3260-8fe1-1b12-0362-741ccf2d241d" [ 940.715260] env[65758]: _type = "HttpNfcLease" [ 940.715260] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 940.716035] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 940.716035] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528a3260-8fe1-1b12-0362-741ccf2d241d" [ 940.716035] env[65758]: _type = "HttpNfcLease" [ 940.716035] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 940.716391] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4157c211-c430-4960-bdfe-b13fb5615595 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.726125] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5253e47f-8a99-9f08-3f4d-f9a9624de289/disk-0.vmdk from lease info. 
{{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 940.726326] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5253e47f-8a99-9f08-3f4d-f9a9624de289/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 940.855858] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f9b3ab54-0881-4a94-a4c5-a636c7945067 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.123012] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f694184-cd46-4cf6-a46b-b8a9f9d63690 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.131497] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96282e89-e78b-4ffe-a166-7309b3c6eff0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.162959] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0ec22f-c6a8-411f-bf95-8726e492abd4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.171941] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed085b62-9a3d-4132-ba5a-d578562b3241 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.188810] env[65758]: DEBUG nova.compute.provider_tree [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.692287] env[65758]: DEBUG nova.scheduler.client.report [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 943.198883] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.199458] env[65758]: DEBUG nova.compute.manager [None 
req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 943.202656] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.734s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.202656] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.207693] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.184s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.209902] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.212969] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.606s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.212969] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.215055] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.697s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.216556] env[65758]: INFO nova.compute.claims [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: 
d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.251406] env[65758]: INFO nova.scheduler.client.report [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted allocations for instance 12c27fac-98e9-486d-bf36-0580a4e0a163 [ 943.253975] env[65758]: INFO nova.scheduler.client.report [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted allocations for instance a9ec9a64-94c7-41a5-a7a4-5e034ddfc592 [ 943.264654] env[65758]: INFO nova.scheduler.client.report [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Deleted allocations for instance 875cbc88-f817-4ea8-a969-b97e875918d1 [ 943.390668] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-ba3153f2-8e6f-469c-8730-957c5eebe97b-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.390959] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-ba3153f2-8e6f-469c-8730-957c5eebe97b-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.391342] env[65758]: DEBUG nova.objects.instance [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'flavor' on Instance uuid ba3153f2-8e6f-469c-8730-957c5eebe97b {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.722959] env[65758]: DEBUG nova.compute.utils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 943.726749] env[65758]: DEBUG nova.compute.manager [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 943.727414] env[65758]: DEBUG nova.network.neutron [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 943.727813] env[65758]: WARNING neutronclient.v2_0.client [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 943.728270] env[65758]: WARNING neutronclient.v2_0.client [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 943.729645] env[65758]: WARNING openstack [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 943.729917] env[65758]: WARNING openstack [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 943.739568] env[65758]: DEBUG nova.compute.manager [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 943.773670] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7515396b-9ce6-4318-85fb-055658344e8e tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "a9ec9a64-94c7-41a5-a7a4-5e034ddfc592" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.373s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.777860] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9441e0e8-3b4a-4cd0-841a-4434d4930bd1 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "12c27fac-98e9-486d-bf36-0580a4e0a163" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.845s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.782456] env[65758]: DEBUG oslo_concurrency.lockutils [None req-588d6d21-9362-418c-b2ed-dbd7f076d1c2 tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "875cbc88-f817-4ea8-a969-b97e875918d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.752s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.801279] env[65758]: DEBUG nova.policy [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b6e413458a84a9b8f2b6dcd0061fc33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd550f85853f447bb91a89b6bc6c5720', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 943.896099] env[65758]: WARNING neutronclient.v2_0.client [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
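The "Acquiring lock … by …", "acquired … waited Ns" and ""released" … held Ns" lines that recur throughout these records are emitted by the inner wrapper in oslo_concurrency/lockutils.py (the file and function named in each record). The following is an illustrative sketch only, not part of the captured log and not Nova's actual code: it assumes oslo.concurrency is installed, and update_usage is a placeholder name standing in for the decorated compute-manager methods named in the records.

from oslo_concurrency import lockutils

# Placeholder critical section guarded by the same named in-process lock
# shown in the records above ("compute_resources"). The synchronized
# decorator's wrapper is what logs the DEBUG acquire/wait/held lines.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # resource-tracker-style bookkeeping would run while the lock is held

update_usage()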
[ 943.896792] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 943.897170] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 944.030732] env[65758]: DEBUG nova.objects.instance [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'pci_requests' on Instance uuid ba3153f2-8e6f-469c-8730-957c5eebe97b {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.193722] env[65758]: DEBUG nova.network.neutron [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Successfully created port: 1a0cdbf3-b230-4f89-999a-4886f142722c {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 944.539783] env[65758]: DEBUG nova.objects.base [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 944.539783] env[65758]: DEBUG nova.network.neutron [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 944.539783] env[65758]: WARNING neutronclient.v2_0.client [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 944.539783] env[65758]: WARNING neutronclient.v2_0.client [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
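The inventory dicts repeated in the surrounding "Inventory has not changed for provider …" records bound what placement will schedule on this node: usable capacity per resource class is (total - reserved) * allocation_ratio. A small illustrative calculation, not part of the log, using the exact figures logged for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51:

# Values copied from the inventory data in the adjacent records.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0 -- which is why 14 allocated
# vCPUs against 48 physical still leaves ample schedulable capacity here.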
[ 944.539783] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 944.539783] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 944.596399] env[65758]: DEBUG nova.policy [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 944.651118] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e043b6-81ac-4921-832f-9b3cd14feb4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.660036] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17dd87c-07e0-4be8-a22e-c40126db5b3c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.694770] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b24523-6e3e-4534-9973-a7809d5a13c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.704886] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2e401c-c1a7-422d-847e-c314df9d7351 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.719627] env[65758]: DEBUG nova.compute.provider_tree [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.752828] env[65758]: DEBUG nova.compute.manager [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 944.783456] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 944.783771] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.783955] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 944.784170] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 944.784459] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 944.784549] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 944.784779] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 944.784965] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 944.785183] 
env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 944.785382] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 944.785582] env[65758]: DEBUG nova.virt.hardware [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 944.786564] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aed8424-1750-441d-bca8-f324c11a2d2c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.797777] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb322ca-f3ad-4e37-8b45-30f2896c8e7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.994704] env[65758]: DEBUG nova.network.neutron [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Successfully created port: ffa44f11-41da-49d5-af63-8c9328cd2c67 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 945.155505] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.155784] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.222604] env[65758]: DEBUG nova.scheduler.client.report [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.658694] env[65758]: DEBUG nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 945.728880] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.729682] env[65758]: DEBUG nova.compute.manager [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 945.733010] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.136s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.733320] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.736459] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.312s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.738421] env[65758]: INFO nova.compute.claims [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.762313] env[65758]: INFO nova.scheduler.client.report [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Deleted allocations for instance aa2f1858-2bb2-4f12-bc05-ef6913ef36e2 [ 945.831649] env[65758]: DEBUG nova.network.neutron [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Successfully updated port: 1a0cdbf3-b230-4f89-999a-4886f142722c {{(pid=65758) _update_port 
/opt/stack/nova/nova/network/neutron.py:617}} [ 946.182447] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.244055] env[65758]: DEBUG nova.compute.utils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.247877] env[65758]: DEBUG nova.compute.manager [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Not allocating networking since 'none' was specified. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 946.271577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8865392c-9eb0-47d0-8483-e4218d3e7686 tempest-ServerPasswordTestJSON-1142614547 tempest-ServerPasswordTestJSON-1142614547-project-member] Lock "aa2f1858-2bb2-4f12-bc05-ef6913ef36e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.143s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.334809] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.336767] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.336767] env[65758]: DEBUG nova.network.neutron [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 946.658973] env[65758]: DEBUG nova.network.neutron [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Successfully updated port: ffa44f11-41da-49d5-af63-8c9328cd2c67 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 946.753684] env[65758]: DEBUG nova.compute.manager [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 946.839801] env[65758]: WARNING openstack [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 946.840233] env[65758]: WARNING openstack [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 946.881757] env[65758]: DEBUG nova.network.neutron [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 946.951676] env[65758]: DEBUG nova.compute.manager [req-472e3c37-36ff-418f-b607-2c5dff929185 req-68a974f0-bbed-443a-8f3d-7746f4d6f7b4 service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Received event network-vif-plugged-1a0cdbf3-b230-4f89-999a-4886f142722c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 946.951676] env[65758]: DEBUG oslo_concurrency.lockutils [req-472e3c37-36ff-418f-b607-2c5dff929185 req-68a974f0-bbed-443a-8f3d-7746f4d6f7b4 service nova] Acquiring lock "3ff9192b-3956-49f6-afd2-827759826056-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.951676] env[65758]: DEBUG oslo_concurrency.lockutils [req-472e3c37-36ff-418f-b607-2c5dff929185 req-68a974f0-bbed-443a-8f3d-7746f4d6f7b4 service nova] Lock "3ff9192b-3956-49f6-afd2-827759826056-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.951676] env[65758]: DEBUG oslo_concurrency.lockutils [req-472e3c37-36ff-418f-b607-2c5dff929185 req-68a974f0-bbed-443a-8f3d-7746f4d6f7b4 service nova] Lock "3ff9192b-3956-49f6-afd2-827759826056-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.951879] env[65758]: DEBUG nova.compute.manager [req-472e3c37-36ff-418f-b607-2c5dff929185 req-68a974f0-bbed-443a-8f3d-7746f4d6f7b4 service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] No waiting events found dispatching network-vif-plugged-1a0cdbf3-b230-4f89-999a-4886f142722c {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 946.951991] env[65758]: WARNING nova.compute.manager [req-472e3c37-36ff-418f-b607-2c5dff929185 req-68a974f0-bbed-443a-8f3d-7746f4d6f7b4 service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Received 
unexpected event network-vif-plugged-1a0cdbf3-b230-4f89-999a-4886f142722c for instance with vm_state building and task_state spawning. [ 946.971694] env[65758]: WARNING neutronclient.v2_0.client [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 946.972477] env[65758]: WARNING openstack [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 946.972881] env[65758]: WARNING openstack [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 947.049140] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.049294] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.099809] env[65758]: DEBUG nova.network.neutron [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance_info_cache with network_info: [{"id": "1a0cdbf3-b230-4f89-999a-4886f142722c", "address": "fa:16:3e:a2:d0:8b", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0cdbf3-b2", "ovs_interfaceid": "1a0cdbf3-b230-4f89-999a-4886f142722c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 947.162714] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.163213] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.163530] env[65758]: DEBUG nova.network.neutron [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 947.189093] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d41e4d3-1390-4bee-9597-d54633dd69cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.200876] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99873e66-b7a3-4b38-9116-420b00d350b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.236748] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5bfb61-346c-4e2b-b5f1-c25ec3f81402 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.246042] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612c250c-977e-4c07-b5ba-39105680f974 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.266301] env[65758]: DEBUG nova.compute.provider_tree [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.553768] env[65758]: DEBUG nova.compute.utils [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 947.602394] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock 
"refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.602900] env[65758]: DEBUG nova.compute.manager [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Instance network_info: |[{"id": "1a0cdbf3-b230-4f89-999a-4886f142722c", "address": "fa:16:3e:a2:d0:8b", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0cdbf3-b2", "ovs_interfaceid": "1a0cdbf3-b230-4f89-999a-4886f142722c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 947.604317] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:d0:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a0cdbf3-b230-4f89-999a-4886f142722c', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.613929] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 947.614279] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.614596] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2db89262-5075-4731-8b0a-7a57e2617edb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.642280] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.642280] env[65758]: value = "task-4660749" [ 947.642280] env[65758]: _type = "Task" [ 947.642280] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.654908] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660749, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.667070] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 947.667508] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 947.716267] env[65758]: WARNING nova.network.neutron [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] 2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4 already exists in list: networks containing: ['2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4']. ignoring it [ 947.769530] env[65758]: DEBUG nova.compute.manager [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 947.774076] env[65758]: DEBUG nova.scheduler.client.report [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.819395] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 947.819755] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 947.819935] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 947.820101] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 947.820244] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 947.820537] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 947.820698] env[65758]: DEBUG 
nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 947.821527] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 947.821527] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 947.821527] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 947.822049] env[65758]: DEBUG nova.virt.hardware [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 947.823129] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9986f7-6d5d-48e0-9d3d-d27fa5836dc7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.838272] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0f238e-4868-4502-b93f-ea6a32f7d202 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.862902] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.869563] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Creating folder: Project (c9f9d66f86144bf895ef14d494882a3b). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.870027] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d553ca39-aff4-4ddc-b8da-f481aceabaa4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.877364] env[65758]: WARNING neutronclient.v2_0.client [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 947.878217] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 947.878608] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 947.889265] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Created folder: Project (c9f9d66f86144bf895ef14d494882a3b) in parent group-v909763. [ 947.889483] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Creating folder: Instances. Parent ref: group-v909971. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.889748] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64385991-952f-49f3-9839-168ed48645c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.904093] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Created folder: Instances in parent group-v909971. [ 947.904455] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 947.904712] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.904951] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-157e47a0-8bbf-45ab-b927-3995f027ecef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.926403] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.926403] env[65758]: value = "task-4660752" [ 947.926403] env[65758]: _type = "Task" [ 947.926403] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.945763] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660752, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.057590] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.158835] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660749, 'name': CreateVM_Task, 'duration_secs': 0.498094} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.161353] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.161755] env[65758]: WARNING neutronclient.v2_0.client [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 948.163692] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.163692] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.163692] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 948.163692] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc63491e-cd3b-4bb8-984e-85a1bf1a5798 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.171425] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 948.171425] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525003ff-a9cc-7b57-b7ae-9eee0f36a1ff" [ 948.171425] env[65758]: _type = "Task" [ 948.171425] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.190578] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525003ff-a9cc-7b57-b7ae-9eee0f36a1ff, 'name': SearchDatastore_Task, 'duration_secs': 0.010573} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.191201] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.191608] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.191964] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.192190] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.192477] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.192854] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bf035c3-ecb4-4c08-90ac-3b6ab32bfd52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.206219] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.206565] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.207836] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a8bc794-6f63-436e-a6f1-2488182097a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.214885] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 948.214885] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ee60bb-e125-5cde-65ff-007ca6285ca9" [ 948.214885] env[65758]: _type = "Task" [ 948.214885] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.228773] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ee60bb-e125-5cde-65ff-007ca6285ca9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.286025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.286606] env[65758]: DEBUG nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 948.289335] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.023s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.290805] env[65758]: INFO nova.compute.claims [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.441947] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660752, 'name': CreateVM_Task, 'duration_secs': 0.342726} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.442181] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.442688] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.442899] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.443303] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 948.443609] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ea33608-8e6e-40f1-b726-744fd77c7557 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.449735] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 948.449735] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520370a5-89d1-3db5-aee0-9cec91273272" [ 948.449735] env[65758]: _type = "Task" [ 948.449735] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.465839] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520370a5-89d1-3db5-aee0-9cec91273272, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.510070] env[65758]: WARNING neutronclient.v2_0.client [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 948.510943] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 948.511399] env[65758]: WARNING openstack [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 948.605754] env[65758]: DEBUG nova.network.neutron [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [{"id": "cdcc66de-e599-4e26-8757-617493c55e00", "address": "fa:16:3e:6f:f2:e7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcc66de-e5", "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ffa44f11-41da-49d5-af63-8c9328cd2c67", "address": "fa:16:3e:70:4e:2c", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffa44f11-41", "ovs_interfaceid": "ffa44f11-41da-49d5-af63-8c9328cd2c67", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 948.648141] env[65758]: DEBUG nova.compute.manager [req-1569bda3-063c-4ce2-999d-d53ab8a250f0 req-08184ae0-09ef-4a28-8108-a61f17256cd7 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received event network-vif-plugged-ffa44f11-41da-49d5-af63-8c9328cd2c67 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 948.648419] env[65758]: DEBUG oslo_concurrency.lockutils [req-1569bda3-063c-4ce2-999d-d53ab8a250f0 req-08184ae0-09ef-4a28-8108-a61f17256cd7 service nova] Acquiring lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.648724] env[65758]: DEBUG oslo_concurrency.lockutils [req-1569bda3-063c-4ce2-999d-d53ab8a250f0 req-08184ae0-09ef-4a28-8108-a61f17256cd7 service nova] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.648897] env[65758]: DEBUG oslo_concurrency.lockutils [req-1569bda3-063c-4ce2-999d-d53ab8a250f0 req-08184ae0-09ef-4a28-8108-a61f17256cd7 service nova] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.649118] env[65758]: DEBUG nova.compute.manager [req-1569bda3-063c-4ce2-999d-d53ab8a250f0 req-08184ae0-09ef-4a28-8108-a61f17256cd7 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] No waiting events found dispatching network-vif-plugged-ffa44f11-41da-49d5-af63-8c9328cd2c67 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 948.649355] env[65758]: WARNING nova.compute.manager [req-1569bda3-063c-4ce2-999d-d53ab8a250f0 req-08184ae0-09ef-4a28-8108-a61f17256cd7 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received unexpected event network-vif-plugged-ffa44f11-41da-49d5-af63-8c9328cd2c67 for instance with vm_state active and task_state None. [ 948.727022] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ee60bb-e125-5cde-65ff-007ca6285ca9, 'name': SearchDatastore_Task, 'duration_secs': 0.013108} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.727656] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bde79c3a-7def-45ea-ab38-2aba5a78a2fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.734678] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 948.734678] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a315b5-d175-32e2-a618-8428a6d0ecb8" [ 948.734678] env[65758]: _type = "Task" [ 948.734678] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.744322] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a315b5-d175-32e2-a618-8428a6d0ecb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.799261] env[65758]: DEBUG nova.compute.utils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 948.801295] env[65758]: DEBUG nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 948.801519] env[65758]: DEBUG nova.network.neutron [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 948.801843] env[65758]: WARNING neutronclient.v2_0.client [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 948.802146] env[65758]: WARNING neutronclient.v2_0.client [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 948.802888] env[65758]: WARNING openstack [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 948.803511] env[65758]: WARNING openstack [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 948.884910] env[65758]: DEBUG nova.policy [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8898aef8104bf582ec78e9c6a5ee2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8be788d761114dfca7244f953b571c7d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 948.920167] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5253e47f-8a99-9f08-3f4d-f9a9624de289/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 948.921186] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29936a5-ba28-4487-af87-68415c9fc9e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.928341] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5253e47f-8a99-9f08-3f4d-f9a9624de289/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 948.928582] env[65758]: ERROR oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5253e47f-8a99-9f08-3f4d-f9a9624de289/disk-0.vmdk due to incomplete transfer. 
[ 948.928886] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3ca845f5-c42d-464c-ba22-f762e9c0ee62 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.937853] env[65758]: DEBUG oslo_vmware.rw_handles [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5253e47f-8a99-9f08-3f4d-f9a9624de289/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 948.938078] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Uploaded image fe1141ac-cd89-43cf-a723-116931d6815e to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 948.940376] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 948.940685] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-abf65221-78d5-4533-92d6-895abe373ec1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.948051] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 948.948051] env[65758]: value = "task-4660753" [ 948.948051] env[65758]: _type = "Task" [ 948.948051] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.961804] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660753, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.965453] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520370a5-89d1-3db5-aee0-9cec91273272, 'name': SearchDatastore_Task, 'duration_secs': 0.010698} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.965754] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.965984] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.966220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.110394] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.110935] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.111104] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.112215] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfff5bb1-735e-4d7f-ba5c-792744fb27d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.131187] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.131504] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.131772] env[65758]: INFO nova.compute.manager [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Attaching volume 51ed0fd6-0a9d-417e-be08-c8c05d6bcc05 to /dev/sdb [ 949.134050] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 949.134279] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 949.134485] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 949.134680] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 949.134863] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 949.135040] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 949.135292] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.135501] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 949.135698] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 949.135886] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 949.136103] env[65758]: DEBUG nova.virt.hardware [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 949.142449] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Reconfiguring VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 949.143057] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9b4dfb3-6d46-44d5-9bca-2b8d0a53bbf7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.166184] env[65758]: DEBUG oslo_vmware.api [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 949.166184] env[65758]: value = "task-4660754" [ 949.166184] env[65758]: _type = "Task" [ 949.166184] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.175351] env[65758]: DEBUG oslo_vmware.api [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660754, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.195814] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f35a3e-8268-49a4-bee1-a2ccbc9c0847 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.203948] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ddca79-25ec-4d4e-9b30-9036d8b29e2f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.219018] env[65758]: DEBUG nova.virt.block_device [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updating existing volume attachment record: a59f540b-13e1-4796-ad82-a97f6ab14bf0 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 949.222284] env[65758]: DEBUG nova.network.neutron [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Successfully created port: 83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 949.250452] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a315b5-d175-32e2-a618-8428a6d0ecb8, 'name': SearchDatastore_Task, 'duration_secs': 0.011502} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.250732] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.250989] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 3ff9192b-3956-49f6-afd2-827759826056/3ff9192b-3956-49f6-afd2-827759826056.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.251474] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.251561] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.251774] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3d06201-c6de-4002-b1ed-c080d190dad6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.254139] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86eeab55-aeaa-4d44-84b4-ab5b02dad10f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.262511] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 949.262511] env[65758]: value = "task-4660755" [ 949.262511] env[65758]: _type = "Task" [ 949.262511] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.267598] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.269899] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.269899] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d727db3c-5629-4189-9ddd-4bffc18eb27b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.278446] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.282712] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 949.282712] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f18e49-d8d6-776c-8689-82833856edb9" [ 949.282712] env[65758]: _type = "Task" [ 949.282712] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.292421] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f18e49-d8d6-776c-8689-82833856edb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.312199] env[65758]: DEBUG nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 949.460090] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660753, 'name': Destroy_Task, 'duration_secs': 0.444355} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.466520] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Destroyed the VM [ 949.466723] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 949.468171] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-28e71267-3e59-493b-9535-5bdac3579536 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.477203] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 949.477203] env[65758]: value = "task-4660758" [ 949.477203] env[65758]: _type = "Task" [ 949.477203] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.492733] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660758, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.614965] env[65758]: DEBUG nova.compute.manager [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Received event network-changed-1a0cdbf3-b230-4f89-999a-4886f142722c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 949.615158] env[65758]: DEBUG nova.compute.manager [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Refreshing instance network info cache due to event network-changed-1a0cdbf3-b230-4f89-999a-4886f142722c. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 949.615395] env[65758]: DEBUG oslo_concurrency.lockutils [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] Acquiring lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.615564] env[65758]: DEBUG oslo_concurrency.lockutils [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] Acquired lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.615841] env[65758]: DEBUG nova.network.neutron [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Refreshing network info cache for port 1a0cdbf3-b230-4f89-999a-4886f142722c {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 949.682662] env[65758]: DEBUG oslo_vmware.api [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.727822] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f8d19b-9f45-4ec3-8ee5-cf3bc2494900 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.737504] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb78d5f6-4638-4ed6-9316-d8d5d9928af2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.806809] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38461419-00d2-4b4e-ad88-5fa86c049e34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.822857] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660755, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.829286] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f18e49-d8d6-776c-8689-82833856edb9, 'name': SearchDatastore_Task, 'duration_secs': 0.012204} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.830830] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa33aca0-0a6a-431e-b303-352c473972fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.836714] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e3c8170-2fbb-4962-9237-e687fc750ca0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.845137] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 949.845137] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f32165-49cb-7c4b-5a48-76f83671351e" [ 949.845137] env[65758]: _type = "Task" [ 949.845137] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.857089] env[65758]: DEBUG nova.compute.provider_tree [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.873016] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f32165-49cb-7c4b-5a48-76f83671351e, 'name': SearchDatastore_Task, 'duration_secs': 0.015393} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.873911] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.874329] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e/d5d27a5c-afe4-49a1-a385-0a8f625b5a1e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.874505] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9309dd35-abe4-40a9-9caa-0b6781e15620 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.885192] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 949.885192] env[65758]: value = "task-4660761" [ 949.885192] env[65758]: _type = "Task" [ 949.885192] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.895736] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660761, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.989442] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660758, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.123625] env[65758]: WARNING neutronclient.v2_0.client [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
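Annotation (not part of the captured log): the image-cache copy traced above is serialized with a named oslo.concurrency lock on the cached VMDK path; once the cache entry is confirmed via SearchDatastore_Task, the lock is released and the per-instance CopyVirtualDisk_Task is issued. A minimal sketch of that pattern follows, with ensure_cache_entry() and copy_virtual_disk() as hypothetical stand-ins for the real vmwareapi calls:

    # Minimal sketch, not Nova's code: serialize access to a shared image-cache
    # VMDK with a named lock, then issue the per-instance copy outside the lock,
    # mirroring the "Acquired/Releasing lock" lines above. ensure_cache_entry()
    # and copy_virtual_disk() are hypothetical stand-ins for the driver's calls.
    from oslo_concurrency import lockutils

    CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
                  "75a6399b-5100-4c51-b5cf-162bd505a28f/"
                  "75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk")

    def clone_root_disk(instance_uuid, ensure_cache_entry, copy_virtual_disk):
        dst = "[datastore1] {0}/{0}.vmdk".format(instance_uuid)
        # Hold the lock only while checking/populating the cache entry, so
        # concurrent builds of other instances are not blocked during the copy.
        with lockutils.lock(CACHE_VMDK):
            ensure_cache_entry(CACHE_VMDK)          # SearchDatastore_Task / MakeDirectory
        return copy_virtual_disk(CACHE_VMDK, dst)   # CopyVirtualDisk_Task
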
[ 950.124112] env[65758]: WARNING openstack [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 950.124566] env[65758]: WARNING openstack [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 950.178905] env[65758]: DEBUG oslo_vmware.api [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660754, 'name': ReconfigVM_Task, 'duration_secs': 0.939207} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.181814] env[65758]: WARNING neutronclient.v2_0.client [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 950.182204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.182455] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Reconfigured VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 950.313077] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577091} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.313837] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 3ff9192b-3956-49f6-afd2-827759826056/3ff9192b-3956-49f6-afd2-827759826056.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.314058] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.317710] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49bacf7a-0a4a-40e3-8454-93f8390e017d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.329037] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 950.329037] env[65758]: value = "task-4660762" [ 950.329037] env[65758]: _type = "Task" [ 950.329037] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.334276] env[65758]: DEBUG nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 950.343703] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660762, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.366291] env[65758]: DEBUG nova.scheduler.client.report [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.371262] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 950.371884] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 950.371884] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 950.371884] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 950.372140] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 950.372140] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 
950.372501] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 950.372501] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 950.372697] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 950.373138] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 950.373138] env[65758]: DEBUG nova.virt.hardware [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 950.374318] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8878df0d-0092-42f1-81ab-0eee42ca626d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.384930] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0262aa9-0aa7-421e-8180-649894d7f456 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.411630] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660761, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.427608] env[65758]: WARNING neutronclient.v2_0.client [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
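Annotation (not part of the captured log): the "Build topologies ... Possible topologies ... Sorted desired topologies" lines above enumerate socket/core/thread factorizations of the flavor's vCPU count under the default 65536 limits. A rough, self-contained approximation of that enumeration (not Nova's actual implementation):

    # Rough sketch: enumerate (sockets, cores, threads) factorizations of the
    # vCPU count that fit the limits, defaulting the limits to 65536 as logged.
    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topos.append((sockets, cores, threads))
        return topos

    # For the m1.nano flavor above (vcpus=1) this yields [(1, 1, 1)], matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
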
[ 950.428471] env[65758]: WARNING openstack [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 950.428608] env[65758]: WARNING openstack [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 950.491808] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660758, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.615488] env[65758]: DEBUG nova.network.neutron [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updated VIF entry in instance network info cache for port 1a0cdbf3-b230-4f89-999a-4886f142722c. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 950.615724] env[65758]: DEBUG nova.network.neutron [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance_info_cache with network_info: [{"id": "1a0cdbf3-b230-4f89-999a-4886f142722c", "address": "fa:16:3e:a2:d0:8b", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0cdbf3-b2", "ovs_interfaceid": "1a0cdbf3-b230-4f89-999a-4886f142722c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 950.688703] env[65758]: DEBUG oslo_concurrency.lockutils [None req-629efa44-f306-4b69-b519-5f3eaad907eb tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-ba3153f2-8e6f-469c-8730-957c5eebe97b-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.297s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.810218] env[65758]: DEBUG nova.network.neutron [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Successfully updated port: 83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 950.839566] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660762, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133967} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.839873] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.840707] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280dd9be-0bfe-466b-9713-6a33d5f8eda2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.865058] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 3ff9192b-3956-49f6-afd2-827759826056/3ff9192b-3956-49f6-afd2-827759826056.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.865449] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4943f0c3-4a88-4dbd-a291-a5cf8819e43a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.881249] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.881901] env[65758]: DEBUG nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 950.885035] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.317s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.886807] env[65758]: INFO nova.compute.claims [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.896322] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 950.896322] env[65758]: value = "task-4660763" [ 950.896322] env[65758]: _type = "Task" [ 950.896322] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.902276] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660761, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671826} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.904493] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e/d5d27a5c-afe4-49a1-a385-0a8f625b5a1e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.904631] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.904871] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35ce0792-a3ba-4cfd-b50d-f1ab6e74cc2f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.914168] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660763, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.915925] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 950.915925] env[65758]: value = "task-4660764" [ 950.915925] env[65758]: _type = "Task" [ 950.915925] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.926838] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660764, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.989763] env[65758]: DEBUG oslo_vmware.api [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660758, 'name': RemoveSnapshot_Task, 'duration_secs': 1.039902} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.990237] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 950.990518] env[65758]: INFO nova.compute.manager [None req-8fc3c160-5aac-4f45-a051-34bb7ec82739 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Took 15.45 seconds to snapshot the instance on the hypervisor. 
[ 951.002249] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.002613] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.118319] env[65758]: DEBUG oslo_concurrency.lockutils [req-4fb1f75d-083b-4195-94af-477d258403e6 req-56e1350f-760e-4ba5-bc01-dbb71674045c service nova] Releasing lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.160096] env[65758]: DEBUG nova.compute.manager [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received event network-changed-ffa44f11-41da-49d5-af63-8c9328cd2c67 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 951.160385] env[65758]: DEBUG nova.compute.manager [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Refreshing instance network info cache due to event network-changed-ffa44f11-41da-49d5-af63-8c9328cd2c67. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 951.160531] env[65758]: DEBUG oslo_concurrency.lockutils [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] Acquiring lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.160696] env[65758]: DEBUG oslo_concurrency.lockutils [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] Acquired lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.161102] env[65758]: DEBUG nova.network.neutron [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Refreshing network info cache for port ffa44f11-41da-49d5-af63-8c9328cd2c67 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 951.315084] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.315292] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.315460] env[65758]: DEBUG nova.network.neutron [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 951.392656] env[65758]: DEBUG nova.compute.utils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 951.396677] env[65758]: DEBUG nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 951.396954] env[65758]: DEBUG nova.network.neutron [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 951.397230] env[65758]: WARNING neutronclient.v2_0.client [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 951.397523] env[65758]: WARNING neutronclient.v2_0.client [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 951.398123] env[65758]: WARNING openstack [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 951.398473] env[65758]: WARNING openstack [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 951.423355] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660763, 'name': ReconfigVM_Task, 'duration_secs': 0.449319} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.424213] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 3ff9192b-3956-49f6-afd2-827759826056/3ff9192b-3956-49f6-afd2-827759826056.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.425043] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bf2f8f8-43c3-4422-a965-c974e423ef84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.430953] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660764, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.19633} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.431200] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 951.432343] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0855ce-85e9-467d-b0d8-927168a44517 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.436846] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 951.436846] env[65758]: value = "task-4660765" [ 951.436846] env[65758]: _type = "Task" [ 951.436846] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.460039] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e/d5d27a5c-afe4-49a1-a385-0a8f625b5a1e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.463683] env[65758]: DEBUG nova.policy [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3acf0a8cd564f81914c7f95a3c4dce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3216444936b0444184f3cbb1497fffc6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 951.466206] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f2ac56f-965c-4c90-8539-fc75fa21186a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.488276] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660765, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.499435] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 951.499435] env[65758]: value = "task-4660767" [ 951.499435] env[65758]: _type = "Task" [ 951.499435] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.508040] env[65758]: DEBUG nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 951.517871] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660767, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.663776] env[65758]: WARNING neutronclient.v2_0.client [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
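Annotation (not part of the captured log): the "Extending root virtual disk to 1048576" / "Extended root virtual disk" steps above appear to express the new size in KiB derived from the flavor's root_gb; for the 1 GiB root disk of m1.nano that is 1048576. A one-line sanity check of that conversion (the unit is an inference from the numbers in the log, not a documented fact):

    # Assumed unit: the extend step is given the new size in KiB, which matches
    # root_gb=1 -> 1048576 in the log lines above.
    def root_disk_size_kib(root_gb):
        return root_gb * 1024 * 1024

    assert root_disk_size_kib(1) == 1048576
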
[ 951.664659] env[65758]: WARNING openstack [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 951.665108] env[65758]: WARNING openstack [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 951.732055] env[65758]: DEBUG nova.compute.manager [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received event network-vif-plugged-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 951.732191] env[65758]: DEBUG oslo_concurrency.lockutils [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Acquiring lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.732445] env[65758]: DEBUG oslo_concurrency.lockutils [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.732582] env[65758]: DEBUG oslo_concurrency.lockutils [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.732745] env[65758]: DEBUG nova.compute.manager [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] No waiting events found dispatching network-vif-plugged-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 951.732906] env[65758]: WARNING nova.compute.manager [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received unexpected event network-vif-plugged-83c394c9-9b0d-40ad-923c-00e70d63c85a for instance with vm_state building and task_state spawning. 
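Annotation (not part of the captured log): the network-vif-plugged handling above shows the per-instance event registry: the handler takes the "<uuid>-events" lock, looks for a waiter, and logs the event as unexpected when the instance is not yet waiting for it (vm_state building, task_state spawning). A rough, self-contained sketch of that pop-or-warn pattern, using a plain dict in place of Nova's event registry:

    # Rough sketch of the pop-or-warn behaviour in the lines above; the lock
    # naming ("<uuid>-events") follows the log, the registry is illustrative.
    from oslo_concurrency import lockutils

    _pending_events = {}   # {instance_uuid: {event_name: waiter}}

    def pop_instance_event(instance_uuid, event_name):
        with lockutils.lock('%s-events' % instance_uuid):
            waiter = _pending_events.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            print('No waiting events found dispatching %s' % event_name)
            print('Received unexpected event %s for instance %s'
                  % (event_name, instance_uuid))
        return waiter
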
[ 951.733068] env[65758]: DEBUG nova.compute.manager [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received event network-changed-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 951.733232] env[65758]: DEBUG nova.compute.manager [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Refreshing instance network info cache due to event network-changed-83c394c9-9b0d-40ad-923c-00e70d63c85a. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 951.733407] env[65758]: DEBUG oslo_concurrency.lockutils [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Acquiring lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.799237] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.799550] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.799737] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.800371] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.800686] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.802656] env[65758]: INFO nova.compute.manager [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 
tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Terminating instance [ 951.820711] env[65758]: WARNING openstack [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 951.821132] env[65758]: WARNING openstack [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 951.871559] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17f8e24-2f32-4f6d-8c42-39e40ef33cfe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.878192] env[65758]: DEBUG nova.network.neutron [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Successfully created port: 2bcc5488-41d1-43a9-8b40-17b8081f4a31 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 951.883474] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26eae057-60d6-46a6-800b-eca5b5c799d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.917122] env[65758]: DEBUG nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 951.924383] env[65758]: DEBUG nova.network.neutron [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 951.927011] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35842c34-9eb9-4b5f-a70c-ba6bf5057a52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.938062] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9059976f-46ef-45ab-8267-2bad35ea34f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.954597] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660765, 'name': Rename_Task, 'duration_secs': 0.157455} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.965927] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.966494] env[65758]: DEBUG nova.compute.provider_tree [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.971404] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61ca22b6-d8db-4ec9-8d9a-615ca13f3064 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.980223] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 951.980223] env[65758]: value = "task-4660769" [ 951.980223] env[65758]: _type = "Task" [ 951.980223] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.989949] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660769, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.024184] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660767, 'name': ReconfigVM_Task, 'duration_secs': 0.31847} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.024788] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Reconfigured VM instance instance-0000004e to attach disk [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e/d5d27a5c-afe4-49a1-a385-0a8f625b5a1e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.025449] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d5fbfc3-ad80-4813-98be-b693f677535b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.035890] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 952.035890] env[65758]: value = "task-4660770" [ 952.035890] env[65758]: _type = "Task" [ 952.035890] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.039682] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.049282] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660770, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.079931] env[65758]: WARNING neutronclient.v2_0.client [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 952.080703] env[65758]: WARNING openstack [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 952.081146] env[65758]: WARNING openstack [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 952.191195] env[65758]: WARNING neutronclient.v2_0.client [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 952.191951] env[65758]: WARNING openstack [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 952.192394] env[65758]: WARNING openstack [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 952.306197] env[65758]: DEBUG nova.compute.manager [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 952.306440] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.307355] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c940c4ee-fe0f-418d-8ca3-0689ca6e1790 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.315731] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.316017] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8002ae4-f0dc-43ed-9d83-90c373ef69f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.323316] env[65758]: DEBUG oslo_vmware.api [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 952.323316] env[65758]: value = "task-4660771" [ 952.323316] env[65758]: _type = "Task" [ 952.323316] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.332869] env[65758]: DEBUG oslo_vmware.api [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.335996] env[65758]: DEBUG nova.network.neutron [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updated VIF entry in instance network info cache for port ffa44f11-41da-49d5-af63-8c9328cd2c67. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 952.336181] env[65758]: DEBUG nova.network.neutron [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [{"id": "cdcc66de-e599-4e26-8757-617493c55e00", "address": "fa:16:3e:6f:f2:e7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcc66de-e5", "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ffa44f11-41da-49d5-af63-8c9328cd2c67", "address": "fa:16:3e:70:4e:2c", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffa44f11-41", "ovs_interfaceid": "ffa44f11-41da-49d5-af63-8c9328cd2c67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 952.442036] env[65758]: DEBUG nova.network.neutron [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [{"id": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "address": "fa:16:3e:01:98:57", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c394c9-9b", "ovs_interfaceid": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 952.475784] env[65758]: DEBUG nova.scheduler.client.report [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 952.493949] env[65758]: DEBUG oslo_vmware.api [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660769, 'name': PowerOnVM_Task, 'duration_secs': 0.511133} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.494186] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.494462] env[65758]: INFO nova.compute.manager [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Took 7.74 seconds to spawn the instance on the hypervisor. 
[ 952.494650] env[65758]: DEBUG nova.compute.manager [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 952.495873] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d033715f-8b41-4351-a9df-1f739d497b37 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.525303] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-ba3153f2-8e6f-469c-8730-957c5eebe97b-ffa44f11-41da-49d5-af63-8c9328cd2c67" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.525518] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-ba3153f2-8e6f-469c-8730-957c5eebe97b-ffa44f11-41da-49d5-af63-8c9328cd2c67" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.548802] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660770, 'name': Rename_Task, 'duration_secs': 0.15742} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.549105] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.549374] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-646b0e26-39b2-4c84-8310-e26e9c973a7e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.558296] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 952.558296] env[65758]: value = "task-4660772" [ 952.558296] env[65758]: _type = "Task" [ 952.558296] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.569489] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660772, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.834218] env[65758]: DEBUG oslo_vmware.api [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660771, 'name': PowerOffVM_Task, 'duration_secs': 0.220942} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.834545] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.834726] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.835524] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-341d3426-ba8b-4540-9489-1938cda8509b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.839434] env[65758]: DEBUG oslo_concurrency.lockutils [req-527ca779-4382-47db-8805-739c65dea169 req-7f20ed67-be50-4788-bb7c-866f6da4fa2e service nova] Releasing lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.907492] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.907813] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.908091] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Deleting the datastore file [datastore2] 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.908482] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c28daf7-fdec-4a0f-bc59-68453e56dd29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.917432] env[65758]: DEBUG oslo_vmware.api [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for the task: (returnval){ [ 952.917432] env[65758]: value = "task-4660774" [ 952.917432] env[65758]: _type = "Task" [ 952.917432] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.928992] env[65758]: DEBUG oslo_vmware.api [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660774, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.936814] env[65758]: DEBUG nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 952.945352] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.945743] env[65758]: DEBUG nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Instance network_info: |[{"id": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "address": "fa:16:3e:01:98:57", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c394c9-9b", "ovs_interfaceid": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 952.946104] env[65758]: DEBUG oslo_concurrency.lockutils [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Acquired lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.946279] env[65758]: DEBUG nova.network.neutron [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Refreshing network info cache for port 83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 952.947585] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None 
req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:98:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f87a752-ebb0-49a4-a67b-e356fa45b89b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83c394c9-9b0d-40ad-923c-00e70d63c85a', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 952.956401] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating folder: Project (8be788d761114dfca7244f953b571c7d). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 952.957627] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5dd6b390-e857-46db-961c-52b9b1e74845 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.969595] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 952.969950] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 952.970205] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 952.970453] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 952.970629] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Image pref 0:0:0 {{(pid=65758) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 952.970784] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 952.970993] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.971170] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 952.971409] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 952.971530] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 952.971730] env[65758]: DEBUG nova.virt.hardware [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 952.972968] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059ac23c-a074-441c-adc1-2cbe0002d9ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.977774] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Created folder: Project (8be788d761114dfca7244f953b571c7d) in parent group-v909763. [ 952.978045] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating folder: Instances. Parent ref: group-v909976. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 952.978869] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d91424f-1e32-4489-ab6e-3ad0f970fbac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.981297] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.096s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.981825] env[65758]: DEBUG nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 952.989279] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.075s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.989279] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.991258] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.672s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.991532] env[65758]: DEBUG nova.objects.instance [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lazy-loading 'resources' on Instance uuid 83fa942b-a195-4bcb-9ed5-5bb6764220a4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.994537] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63e787a-ecb1-4cde-90a2-cba0ed5b998e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.005021] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Created folder: Instances in parent group-v909976. 
[ 953.005346] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 953.016418] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.025189] env[65758]: INFO nova.scheduler.client.report [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Deleted allocations for instance 454bd092-f683-4a3a-91c9-65191d6996f4 [ 953.026864] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0152d04-a0ce-44e2-adad-88fa53ce347b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.049384] env[65758]: INFO nova.compute.manager [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Took 38.07 seconds to build instance. [ 953.051114] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.051114] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.052615] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7265d0e7-3043-48f4-adca-e55973197a1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.077798] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.077798] env[65758]: value = "task-4660777" [ 953.077798] env[65758]: _type = "Task" [ 953.077798] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.079246] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69171bc8-fe4d-47e4-b939-bb915beaa686 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.088680] env[65758]: DEBUG oslo_vmware.api [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660772, 'name': PowerOnVM_Task, 'duration_secs': 0.466473} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.090349] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.090349] env[65758]: INFO nova.compute.manager [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Took 5.32 seconds to spawn the instance on the hypervisor. [ 953.090349] env[65758]: DEBUG nova.compute.manager [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 953.091150] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6e210a-4254-45f6-9435-524ac6017d67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.114029] env[65758]: WARNING neutronclient.v2_0.client [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 953.119722] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Reconfiguring VM to detach interface {{(pid=65758) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 953.119965] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660777, 'name': CreateVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.120979] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1997b633-5754-4b83-b00d-a0276464d174 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.146732] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 953.146732] env[65758]: value = "task-4660778" [ 953.146732] env[65758]: _type = "Task" [ 953.146732] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.156869] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.431068] env[65758]: DEBUG oslo_vmware.api [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Task: {'id': task-4660774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.420355} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.431068] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.431068] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.431068] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.431068] env[65758]: INFO nova.compute.manager [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Took 1.12 seconds to destroy the instance on the hypervisor. [ 953.431068] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 953.431068] env[65758]: DEBUG nova.compute.manager [-] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 953.431068] env[65758]: DEBUG nova.network.neutron [-] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 953.431068] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 953.431068] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 953.431567] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 953.458015] env[65758]: WARNING neutronclient.v2_0.client [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 953.458779] env[65758]: WARNING openstack [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 953.459502] env[65758]: WARNING openstack [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 953.489994] env[65758]: DEBUG nova.compute.utils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 953.495026] env[65758]: DEBUG nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 953.495026] env[65758]: DEBUG nova.network.neutron [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 953.495026] env[65758]: WARNING neutronclient.v2_0.client [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 953.495026] env[65758]: WARNING neutronclient.v2_0.client [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 953.495026] env[65758]: WARNING openstack [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 953.495026] env[65758]: WARNING openstack [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 953.522715] env[65758]: DEBUG nova.compute.manager [req-6b72f79b-3abf-41d8-b447-89179ea9d576 req-910fce5b-7a10-49cb-a9f5-b45ae0bdd61f service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Received event network-vif-plugged-2bcc5488-41d1-43a9-8b40-17b8081f4a31 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 953.523173] env[65758]: DEBUG oslo_concurrency.lockutils [req-6b72f79b-3abf-41d8-b447-89179ea9d576 req-910fce5b-7a10-49cb-a9f5-b45ae0bdd61f service nova] Acquiring lock "33098961-060f-4503-a805-6ae7351b45ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.523173] env[65758]: DEBUG oslo_concurrency.lockutils [req-6b72f79b-3abf-41d8-b447-89179ea9d576 req-910fce5b-7a10-49cb-a9f5-b45ae0bdd61f service nova] Lock "33098961-060f-4503-a805-6ae7351b45ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.523383] env[65758]: DEBUG oslo_concurrency.lockutils [req-6b72f79b-3abf-41d8-b447-89179ea9d576 req-910fce5b-7a10-49cb-a9f5-b45ae0bdd61f service nova] Lock "33098961-060f-4503-a805-6ae7351b45ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.523557] env[65758]: DEBUG nova.compute.manager [req-6b72f79b-3abf-41d8-b447-89179ea9d576 req-910fce5b-7a10-49cb-a9f5-b45ae0bdd61f service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] No waiting events found dispatching network-vif-plugged-2bcc5488-41d1-43a9-8b40-17b8081f4a31 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 953.523874] env[65758]: WARNING nova.compute.manager [req-6b72f79b-3abf-41d8-b447-89179ea9d576 req-910fce5b-7a10-49cb-a9f5-b45ae0bdd61f service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Received unexpected event network-vif-plugged-2bcc5488-41d1-43a9-8b40-17b8081f4a31 for instance with vm_state building and task_state spawning. 
[ 953.525139] env[65758]: DEBUG nova.network.neutron [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Successfully updated port: 2bcc5488-41d1-43a9-8b40-17b8081f4a31 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 953.547830] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 953.553086] env[65758]: DEBUG oslo_concurrency.lockutils [None req-770b8489-8d2c-41de-9145-90f0c7b8af69 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.581s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.558938] env[65758]: DEBUG oslo_concurrency.lockutils [None req-43876bc3-8fbb-4343-9fec-198874f5c596 tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "454bd092-f683-4a3a-91c9-65191d6996f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.095s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.594879] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660777, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.626941] env[65758]: DEBUG nova.policy [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91358f51732f44198a020f6669168408', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4095654557a34bb0907071aedb3bb678', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 953.655569] env[65758]: INFO nova.compute.manager [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Took 34.16 seconds to build instance. [ 953.662881] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.933316] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad55c694-ee69-43c4-aeaf-bf1ec9954f03 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.946932] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90031c7-bde6-4d79-bfc9-cf3c3fe6ed4d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.985772] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcafe2eb-d850-4897-9dc3-bb6859a4915c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.997502] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e69f775-05af-4448-866f-0e998acc4bb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.002686] env[65758]: DEBUG nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 954.022984] env[65758]: DEBUG nova.compute.provider_tree [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.029228] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "refresh_cache-33098961-060f-4503-a805-6ae7351b45ea" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.029441] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "refresh_cache-33098961-060f-4503-a805-6ae7351b45ea" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.029645] env[65758]: DEBUG nova.network.neutron [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 954.093266] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660777, 'name': CreateVM_Task, 'duration_secs': 0.582623} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.093497] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.094513] env[65758]: WARNING neutronclient.v2_0.client [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 954.094832] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.094983] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.095341] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 954.095635] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55b64cd3-07c9-43a7-9bac-228210cb6753 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.102080] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 954.102080] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520fa1f5-350f-ab1f-4702-4b22d4df80c4" [ 954.102080] env[65758]: _type = "Task" [ 954.102080] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.113367] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520fa1f5-350f-ab1f-4702-4b22d4df80c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.127243] env[65758]: DEBUG nova.network.neutron [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Successfully created port: 2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 954.161012] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e2c22c9d-cc8a-4d38-8998-3e68af709a74 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.672s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.161012] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.283726] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Volume attach. Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 954.284093] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 954.285179] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c575f966-2692-49a7-88f2-f3123018144f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.310526] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6885cacf-0210-4acf-9a17-88a1915b4cde {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.348507] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 
volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05/volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.350337] env[65758]: WARNING neutronclient.v2_0.client [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 954.351192] env[65758]: WARNING openstack [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 954.351418] env[65758]: WARNING openstack [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 954.359354] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-267dad10-15da-4a97-a15d-315a841bf666 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.386883] env[65758]: DEBUG oslo_vmware.api [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 954.386883] env[65758]: value = "task-4660780" [ 954.386883] env[65758]: _type = "Task" [ 954.386883] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.397414] env[65758]: DEBUG oslo_vmware.api [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660780, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.482765] env[65758]: DEBUG nova.network.neutron [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updated VIF entry in instance network info cache for port 83c394c9-9b0d-40ad-923c-00e70d63c85a. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 954.483139] env[65758]: DEBUG nova.network.neutron [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [{"id": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "address": "fa:16:3e:01:98:57", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c394c9-9b", "ovs_interfaceid": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 954.527597] env[65758]: DEBUG nova.scheduler.client.report [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.532175] env[65758]: WARNING openstack [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 954.532768] env[65758]: WARNING openstack [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 954.573276] env[65758]: DEBUG nova.network.neutron [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] 
[instance: 33098961-060f-4503-a805-6ae7351b45ea] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 954.623988] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520fa1f5-350f-ab1f-4702-4b22d4df80c4, 'name': SearchDatastore_Task, 'duration_secs': 0.015877} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.624149] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.624306] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.624854] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.625030] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.625241] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.625517] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ff5d299-250c-44cb-9716-f4f281c3b0b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.638602] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.638974] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.643656] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ef906c5-32d9-436d-99a9-e6bc2e037e5d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.659601] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 954.659601] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52172195-ab99-7747-5273-cab35e007853" [ 954.659601] env[65758]: _type = "Task" [ 954.659601] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.667392] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.675344] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52172195-ab99-7747-5273-cab35e007853, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.684511] env[65758]: WARNING neutronclient.v2_0.client [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
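The "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG entries around the SearchDatastore_Task calls above (lockutils.py:313, 316 and 334) are emitted by oslo.concurrency while the VMware driver serialises access to the shared devstack-image-cache_base path. A minimal illustrative sketch of that locking pattern using the public lockutils.lock() context manager follows; the cache_path argument, the ds_browser object and its search() call are placeholders for illustration, not the actual Nova code:

from oslo_concurrency import lockutils

def check_image_cache(ds_browser, cache_path):
    # lockutils.lock() logs 'Acquiring lock "<name>"' and 'Acquired lock
    # "<name>"' on entry and 'Releasing lock "<name>"' on exit, which is the
    # sequence visible in the log while the datastore search runs.
    with lockutils.lock(cache_path):
        # Placeholder for the HostDatastoreBrowser.SearchDatastore_Task
        # request issued while the cache path is held.
        return ds_browser.search(cache_path)

The decorator form, lockutils.synchronized("<name>"), is what produces the longer 'Lock "<name>" acquired by "<function>" :: waited N.NNNs' / '"released" ... :: held N.NNNs' entries (lockutils.py:405-424) seen for the compute-manager and build locks in this log.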
[ 954.685229] env[65758]: WARNING openstack [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 954.685595] env[65758]: WARNING openstack [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 954.791200] env[65758]: DEBUG nova.network.neutron [-] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 954.838176] env[65758]: DEBUG nova.compute.manager [req-608a92c3-76ee-4e12-a5e3-6e50b37fcfb2 req-8577a5a5-76c2-4b21-8824-31105e22ff04 service nova] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Received event network-vif-deleted-ec4eeaee-4c33-4f1c-93a9-038d455eff39 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 954.855286] env[65758]: DEBUG nova.network.neutron [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Updating instance_info_cache with network_info: [{"id": "2bcc5488-41d1-43a9-8b40-17b8081f4a31", "address": "fa:16:3e:dd:ed:d6", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bcc5488-41", "ovs_interfaceid": "2bcc5488-41d1-43a9-8b40-17b8081f4a31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 954.898638] env[65758]: DEBUG oslo_vmware.api [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660780, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.991523] env[65758]: DEBUG oslo_concurrency.lockutils [req-af5fe04e-616c-4047-b534-9b23833d89e0 req-ba78abda-a0c3-4325-ae77-ced5836e2b96 service nova] Releasing lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.992049] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.992282] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.992500] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.992676] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.992863] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.997936] env[65758]: INFO nova.compute.manager [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Terminating instance [ 955.014419] env[65758]: DEBUG nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 955.033301] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.039983] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.780s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.041995] env[65758]: INFO nova.compute.claims [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.052656] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 955.052922] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 955.053104] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 955.053324] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 955.053480] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:400}} [ 955.053678] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 955.053895] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 955.054191] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 955.054326] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 955.054465] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 955.054669] env[65758]: DEBUG nova.virt.hardware [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 955.056073] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809124ec-3e36-4ae1-8c4a-f8141f6060a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.066383] env[65758]: INFO nova.scheduler.client.report [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Deleted allocations for instance 83fa942b-a195-4bcb-9ed5-5bb6764220a4 [ 955.069479] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1638a129-4a21-47e2-a82c-d5ef56e26958 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.169702] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.177990] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52172195-ab99-7747-5273-cab35e007853, 'name': SearchDatastore_Task, 'duration_secs': 0.022736} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.179452] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e990fb30-ac1a-49a5-a79d-ad31e9bc0026 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.188680] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 955.188680] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5242428f-42ab-9520-c219-0effd58c0f2c" [ 955.188680] env[65758]: _type = "Task" [ 955.188680] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.208155] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5242428f-42ab-9520-c219-0effd58c0f2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.281025] env[65758]: DEBUG nova.compute.manager [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 955.301300] env[65758]: INFO nova.compute.manager [-] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Took 1.87 seconds to deallocate network for instance. 
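The nova.virt.hardware DEBUG entries above walk through Nova's CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image topology constraints the limits default to 65536 sockets, cores and threads, and the only factorisation of a single vCPU is 1 socket x 1 core x 1 thread, which is why the log reports exactly one possible topology. A simplified, illustrative sketch of that enumeration on plain tuples (the real nova.virt.hardware code works on VirtCPUTopology objects and applies preferences and NUMA constraints on top of this):

import itertools

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    # Enumerate every (sockets, cores, threads) triple, bounded by the
    # limits, whose product equals the flavor's vCPU count.
    candidates = itertools.product(
        range(1, min(vcpus, max_sockets) + 1),
        range(1, min(vcpus, max_cores) + 1),
        range(1, min(vcpus, max_threads) + 1))
    return [t for t in candidates if t[0] * t[1] * t[2] == vcpus]

# For the flavor in the log (vcpus=1) the only candidate is 1:1:1,
# matching the "Got 1 possible topologies" entry above.
print(possible_cpu_topologies(1))   # [(1, 1, 1)]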
[ 955.359067] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "refresh_cache-33098961-060f-4503-a805-6ae7351b45ea" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.359446] env[65758]: DEBUG nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Instance network_info: |[{"id": "2bcc5488-41d1-43a9-8b40-17b8081f4a31", "address": "fa:16:3e:dd:ed:d6", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bcc5488-41", "ovs_interfaceid": "2bcc5488-41d1-43a9-8b40-17b8081f4a31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 955.360359] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:ed:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a965790c-2d2f-4c2a-9ee7-745f4d53039b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2bcc5488-41d1-43a9-8b40-17b8081f4a31', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.375804] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 955.375804] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.375804] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cee41e62-2d59-4bc7-858f-59ab7bb20d00 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.413275] env[65758]: DEBUG oslo_vmware.api [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660780, 'name': ReconfigVM_Task, 'duration_secs': 0.967201} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.414939] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfigured VM instance instance-00000045 to attach disk [datastore2] volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05/volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.419861] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.419861] env[65758]: value = "task-4660781" [ 955.419861] env[65758]: _type = "Task" [ 955.419861] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.420092] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c909564b-79d6-4106-bf43-2300995aaec8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.441853] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660781, 'name': CreateVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.443429] env[65758]: DEBUG oslo_vmware.api [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 955.443429] env[65758]: value = "task-4660782" [ 955.443429] env[65758]: _type = "Task" [ 955.443429] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.504859] env[65758]: DEBUG nova.compute.manager [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 955.505038] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 955.506582] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741700b6-aa7e-444b-88d7-9333128c999a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.515949] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.516371] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-167181a3-c962-4245-a06e-885bfaa4527f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.525619] env[65758]: DEBUG oslo_vmware.api [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 955.525619] env[65758]: value = "task-4660783" [ 955.525619] env[65758]: _type = "Task" [ 955.525619] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.537093] env[65758]: DEBUG oslo_vmware.api [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.581985] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f99cfab6-c751-4e56-bf74-428a81cad8ae tempest-MigrationsAdminTest-1268559466 tempest-MigrationsAdminTest-1268559466-project-member] Lock "83fa942b-a195-4bcb-9ed5-5bb6764220a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.790s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.670132] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.684409] env[65758]: DEBUG nova.compute.manager [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Received event network-changed-2bcc5488-41d1-43a9-8b40-17b8081f4a31 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 955.684409] env[65758]: DEBUG nova.compute.manager [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Refreshing instance network info cache due to event network-changed-2bcc5488-41d1-43a9-8b40-17b8081f4a31. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 955.684409] env[65758]: DEBUG oslo_concurrency.lockutils [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] Acquiring lock "refresh_cache-33098961-060f-4503-a805-6ae7351b45ea" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.684409] env[65758]: DEBUG oslo_concurrency.lockutils [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] Acquired lock "refresh_cache-33098961-060f-4503-a805-6ae7351b45ea" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.684409] env[65758]: DEBUG nova.network.neutron [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Refreshing network info cache for port 2bcc5488-41d1-43a9-8b40-17b8081f4a31 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 955.696802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.696802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.696802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.696802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.696802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.701342] env[65758]: INFO nova.compute.manager [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Terminating instance [ 955.710031] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5242428f-42ab-9520-c219-0effd58c0f2c, 'name': SearchDatastore_Task, 'duration_secs': 0.031648} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.710883] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.711290] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50/63b744d2-541a-42e3-9717-b06a4459fd50.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.711609] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e228c1d-5f58-4455-a9e5-9f97e7c269a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.722838] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 955.722838] env[65758]: value = "task-4660785" [ 955.722838] env[65758]: _type = "Task" [ 955.722838] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.735490] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660785, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.811625] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.819889] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.944226] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660781, 'name': CreateVM_Task, 'duration_secs': 0.443499} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.944580] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 955.948703] env[65758]: WARNING neutronclient.v2_0.client [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 955.949799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.949918] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.950272] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 955.951110] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ed31702-3d92-4657-9199-ab2a29ea04e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.958513] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: 
(returnval){ [ 955.958513] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523f5259-803c-06a7-f101-dd2b7861b67e" [ 955.958513] env[65758]: _type = "Task" [ 955.958513] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.963824] env[65758]: DEBUG oslo_vmware.api [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660782, 'name': ReconfigVM_Task, 'duration_secs': 0.205777} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.969514] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 955.984448] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523f5259-803c-06a7-f101-dd2b7861b67e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.057438] env[65758]: DEBUG oslo_vmware.api [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660783, 'name': PowerOffVM_Task, 'duration_secs': 0.263264} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.058285] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.058447] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.058756] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-718b239f-1642-4646-a569-eaed4c8f2a1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.082871] env[65758]: DEBUG nova.scheduler.client.report [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 956.108824] env[65758]: DEBUG nova.scheduler.client.report [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 956.108824] env[65758]: DEBUG nova.compute.provider_tree [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 956.126406] env[65758]: DEBUG nova.scheduler.client.report [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: None {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 956.145022] env[65758]: DEBUG nova.network.neutron [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 
tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Successfully updated port: 2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 956.168888] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.171723] env[65758]: DEBUG nova.scheduler.client.report [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 956.179730] env[65758]: WARNING neutronclient.v2_0.client [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 956.180547] env[65758]: WARNING openstack [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 956.181197] env[65758]: WARNING openstack [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 956.218812] env[65758]: DEBUG nova.compute.manager [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 956.219052] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.220201] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b573f21-2317-408b-8c18-0848c6a7faff {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.233187] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660785, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.235663] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.238616] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20163bfb-81d7-4d33-8975-d77ec982ff8e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.251118] env[65758]: DEBUG oslo_vmware.api [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 956.251118] env[65758]: value = "task-4660787" [ 956.251118] env[65758]: _type = "Task" [ 956.251118] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.264101] env[65758]: DEBUG oslo_vmware.api [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660787, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.455397] env[65758]: WARNING neutronclient.v2_0.client [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 956.456075] env[65758]: WARNING openstack [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 956.456454] env[65758]: WARNING openstack [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 956.480555] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523f5259-803c-06a7-f101-dd2b7861b67e, 'name': SearchDatastore_Task, 'duration_secs': 0.095862} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.481728] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.481728] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.481728] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.483363] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.483363] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.483363] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b73655dc-c640-4702-83c9-34d76be7f384 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.491816] env[65758]: INFO nova.compute.manager [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Rebuilding instance [ 956.507956] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.508429] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.513571] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10d2b8c5-4ea1-4aaf-b44d-b5e156cc66f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.534185] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 956.534185] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5288a557-1969-6be8-c0d7-904a6adf061e" [ 956.534185] env[65758]: _type = "Task" [ 956.534185] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.546773] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5288a557-1969-6be8-c0d7-904a6adf061e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.558627] env[65758]: DEBUG nova.compute.manager [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 956.559781] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7c5c36-0529-4291-8e57-0a69a7fdc713 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.577297] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c312523b-283c-4921-ae76-6f71116e1e23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.586046] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41a29ef-7108-4df4-b988-9adfbdd169f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.619256] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634d6ba8-9643-4e8b-95ff-4cd380461fae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.628784] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbde389-04f6-4e33-a037-9120a5d16e44 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.645064] env[65758]: DEBUG nova.compute.provider_tree [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.648906] env[65758]: DEBUG nova.network.neutron [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d 
req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Updated VIF entry in instance network info cache for port 2bcc5488-41d1-43a9-8b40-17b8081f4a31. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 956.648906] env[65758]: DEBUG nova.network.neutron [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Updating instance_info_cache with network_info: [{"id": "2bcc5488-41d1-43a9-8b40-17b8081f4a31", "address": "fa:16:3e:dd:ed:d6", "network": {"id": "bdeb4e63-9eba-4a1b-8d98-bec885775c07", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2076474168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3216444936b0444184f3cbb1497fffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bcc5488-41", "ovs_interfaceid": "2bcc5488-41d1-43a9-8b40-17b8081f4a31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 956.650714] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.652710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.653778] env[65758]: DEBUG nova.network.neutron [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 956.665801] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.665801] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 
tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.665801] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Deleting the datastore file [datastore1] e93528eb-33d0-46d1-94e8-d1d66f2c682f {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.666149] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e448f8f-277a-4c42-b9b8-d839bf77b44c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.672466] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.679727] env[65758]: DEBUG oslo_vmware.api [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for the task: (returnval){ [ 956.679727] env[65758]: value = "task-4660788" [ 956.679727] env[65758]: _type = "Task" [ 956.679727] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.692279] env[65758]: DEBUG oslo_vmware.api [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.736532] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660785, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.761978] env[65758]: DEBUG oslo_vmware.api [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660787, 'name': PowerOffVM_Task, 'duration_secs': 0.455768} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.762302] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.762640] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.762740] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a586426b-7e39-464b-a993-d5e1790ac8ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.870805] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.871013] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.871242] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Deleting the datastore file [datastore2] 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.871534] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22ac5971-0137-4907-ab8d-eab77d24512c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.876265] env[65758]: DEBUG nova.compute.manager [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Received event network-vif-plugged-2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 956.876265] env[65758]: DEBUG oslo_concurrency.lockutils [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Acquiring lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.876265] env[65758]: DEBUG oslo_concurrency.lockutils [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.876403] env[65758]: DEBUG oslo_concurrency.lockutils [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.876584] env[65758]: DEBUG nova.compute.manager [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] No waiting events found dispatching network-vif-plugged-2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 956.876749] env[65758]: WARNING nova.compute.manager [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Received unexpected event network-vif-plugged-2e41907c-1553-48df-9644-cb422d2f19df for instance with vm_state building and task_state spawning. [ 956.876861] env[65758]: DEBUG nova.compute.manager [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Received event network-changed-2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 956.876992] env[65758]: DEBUG nova.compute.manager [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Refreshing instance network info cache due to event network-changed-2e41907c-1553-48df-9644-cb422d2f19df. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 956.877173] env[65758]: DEBUG oslo_concurrency.lockutils [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Acquiring lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.883974] env[65758]: DEBUG oslo_vmware.api [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for the task: (returnval){ [ 956.883974] env[65758]: value = "task-4660790" [ 956.883974] env[65758]: _type = "Task" [ 956.883974] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.893838] env[65758]: DEBUG oslo_vmware.api [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660790, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.042858] env[65758]: DEBUG nova.objects.instance [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'flavor' on Instance uuid df46c28d-7cbd-490e-8db2-9730e4d9f953 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.051193] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5288a557-1969-6be8-c0d7-904a6adf061e, 'name': SearchDatastore_Task, 'duration_secs': 0.034185} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.052145] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3409a43-fcbd-4d9c-9ad2-bfe66a8e4d87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.059378] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 957.059378] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523a9a48-dea8-0076-6b3b-4985896e0f21" [ 957.059378] env[65758]: _type = "Task" [ 957.059378] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.074393] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523a9a48-dea8-0076-6b3b-4985896e0f21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.156567] env[65758]: DEBUG nova.scheduler.client.report [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.161051] env[65758]: DEBUG oslo_concurrency.lockutils [req-6dc1cd49-43f5-46eb-ac4f-b526c0a02f4d req-495d2af5-e269-4ad1-a5b7-047f571a6fef service nova] Releasing lock "refresh_cache-33098961-060f-4503-a805-6ae7351b45ea" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.162921] env[65758]: WARNING openstack [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 957.162921] env[65758]: WARNING openstack [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 957.183074] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.194493] env[65758]: DEBUG oslo_vmware.api [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.232043] env[65758]: DEBUG nova.network.neutron [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 957.240163] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660785, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.321839] env[65758]: WARNING neutronclient.v2_0.client [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 957.322561] env[65758]: WARNING openstack [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 957.322934] env[65758]: WARNING openstack [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 957.398523] env[65758]: DEBUG oslo_vmware.api [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660790, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.468754] env[65758]: DEBUG nova.network.neutron [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [{"id": "2e41907c-1553-48df-9644-cb422d2f19df", "address": "fa:16:3e:b2:e3:b9", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e41907c-15", "ovs_interfaceid": "2e41907c-1553-48df-9644-cb422d2f19df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 957.548375] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cd039502-b663-4f9f-9dfc-b6292f2bc6c9 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock 
"df46c28d-7cbd-490e-8db2-9730e4d9f953" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.417s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.578292] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.578632] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523a9a48-dea8-0076-6b3b-4985896e0f21, 'name': SearchDatastore_Task, 'duration_secs': 0.032196} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.579820] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-105bb14d-3763-4ef9-a664-305ce6c7f3b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.583695] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.583695] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 33098961-060f-4503-a805-6ae7351b45ea/33098961-060f-4503-a805-6ae7351b45ea.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.583695] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f00759e-dd2b-44b5-b5c2-ae4d2eaf1718 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.591816] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 957.591816] env[65758]: value = "task-4660791" [ 957.591816] env[65758]: _type = "Task" [ 957.591816] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.596639] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 957.596639] env[65758]: value = "task-4660792" [ 957.596639] env[65758]: _type = "Task" [ 957.596639] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.609409] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660791, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.615943] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660792, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.675887] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.636s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.676462] env[65758]: DEBUG nova.compute.manager [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 957.680573] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.681142] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.499s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.682892] env[65758]: INFO nova.compute.claims [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.701711] env[65758]: DEBUG oslo_vmware.api [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Task: {'id': task-4660788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.729777} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.702158] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.702450] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.702778] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.703029] env[65758]: INFO nova.compute.manager [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Took 2.20 seconds to destroy the instance on the hypervisor. [ 957.703326] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 957.703594] env[65758]: DEBUG nova.compute.manager [-] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 957.704800] env[65758]: DEBUG nova.network.neutron [-] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 957.705147] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 957.707037] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 957.707037] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 957.743297] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660785, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.645761} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.743777] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50/63b744d2-541a-42e3-9717-b06a4459fd50.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.744094] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.746052] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4769ffd4-731f-474d-826b-98eab6237853 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.756617] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 957.756617] env[65758]: value = "task-4660794" [ 957.756617] env[65758]: _type = "Task" [ 957.756617] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.771263] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.783874] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 957.901041] env[65758]: DEBUG oslo_vmware.api [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Task: {'id': task-4660790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.540899} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.901418] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.901647] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.901876] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.902069] env[65758]: INFO nova.compute.manager [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Took 1.68 seconds to destroy the instance on the hypervisor. [ 957.902420] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 957.902611] env[65758]: DEBUG nova.compute.manager [-] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 957.902710] env[65758]: DEBUG nova.network.neutron [-] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 957.902991] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 957.903705] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 957.904013] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 957.973304] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.973304] env[65758]: DEBUG nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Instance network_info: |[{"id": "2e41907c-1553-48df-9644-cb422d2f19df", "address": "fa:16:3e:b2:e3:b9", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e41907c-15", "ovs_interfaceid": "2e41907c-1553-48df-9644-cb422d2f19df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 957.973304] env[65758]: DEBUG oslo_concurrency.lockutils [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Acquired lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.973304] env[65758]: DEBUG nova.network.neutron [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Refreshing network info cache for port 2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 957.975167] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:b2:e3:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '438671d0-9468-4e44-84c1-4c0ebaa743e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e41907c-1553-48df-9644-cb422d2f19df', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.983639] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating folder: Project (4095654557a34bb0907071aedb3bb678). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 957.984343] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31ece833-b8bd-46fe-a568-9e3e9a7957eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.000286] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Created folder: Project (4095654557a34bb0907071aedb3bb678) in parent group-v909763. [ 958.001243] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating folder: Instances. Parent ref: group-v909981. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.001243] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b97116f-4f13-4533-baf2-b19578ec9a10 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.017902] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Created folder: Instances in parent group-v909981. [ 958.018224] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 958.018690] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.018826] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f28803e-f674-484b-bc3b-e576326b800e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.038358] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 958.043102] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.043102] env[65758]: value = "task-4660797" [ 958.043102] env[65758]: _type = "Task" [ 958.043102] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.053282] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660797, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.065265] env[65758]: INFO nova.compute.manager [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Rebuilding instance [ 958.108474] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660791, 'name': PowerOffVM_Task, 'duration_secs': 0.241264} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.109238] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.110030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 958.110866] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea4f3e2-dec3-43e5-b9c8-65c4215d78c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.117498] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660792, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.123867] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 958.126602] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73dbb5d8-6a7e-4dd2-9f86-c0ae7c4d6fdd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.129282] env[65758]: DEBUG nova.compute.manager [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 958.130314] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073212fd-8795-4657-a265-1582d133b663 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.158706] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.158706] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.158706] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Deleting the datastore file [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.159088] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0461ad05-5a1a-4f22-ad26-b3f8f4124c38 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.170740] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 958.170740] env[65758]: value = "task-4660799" [ 958.170740] env[65758]: _type = "Task" [ 958.170740] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.178204] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.182304] env[65758]: DEBUG nova.compute.utils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 958.187380] env[65758]: DEBUG nova.compute.manager [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Not allocating networking since 'none' was specified. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 958.187663] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.270220] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090775} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.270549] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.271418] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d105d225-39f2-48a2-bdea-05a1e0342f93 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.296933] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50/63b744d2-541a-42e3-9717-b06a4459fd50.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.297366] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e4dcffd-9696-4c51-aa44-8a45e8cad47f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.328928] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 958.328928] env[65758]: value = "task-4660800" [ 958.328928] env[65758]: _type = "Task" [ 958.328928] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.338826] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660800, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.488031] env[65758]: WARNING neutronclient.v2_0.client [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 958.488031] env[65758]: WARNING openstack [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 958.488031] env[65758]: WARNING openstack [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 958.553940] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660797, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.610964] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660792, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619368} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.611489] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 33098961-060f-4503-a805-6ae7351b45ea/33098961-060f-4503-a805-6ae7351b45ea.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 958.611873] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 958.612302] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8658f47c-1326-46c2-a354-48406ef56d5e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.623784] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 958.623784] env[65758]: value = "task-4660801" [ 958.623784] env[65758]: _type = "Task" [ 958.623784] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.635814] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660801, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.680600] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task} progress is 18%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.689504] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144016} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.689504] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.689504] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.689504] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.691780] env[65758]: DEBUG nova.compute.manager [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 958.850327] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.882711] env[65758]: DEBUG nova.network.neutron [-] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 958.931302] env[65758]: DEBUG nova.compute.manager [req-57ee849b-88ca-43d3-adb4-8e3a3a8081c0 req-f462c787-f820-411a-9df8-de9780b77d2c service nova] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Received event network-vif-deleted-25549e11-fab5-4462-b69b-5fa3581f6d34 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 958.984895] env[65758]: WARNING neutronclient.v2_0.client [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
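Most of the activity recorded above follows a single oslo.vmware pattern: a SOAP method such as FileManager.DeleteDatastoreFile_Task or VirtualDiskManager.ExtendVirtualDisk_Task is invoked through the API session, the call returns a Task reference, and wait_for_task/_poll_task keeps polling it ("progress is N%") until it reports completed successfully. The sketch below shows that cycle with oslo.vmware used directly; the vCenter address, credentials and the datastore path are illustrative placeholders, not values from this run, and a reachable vCenter is assumed.

    # Minimal sketch of the invoke/poll cycle visible in the surrounding log.
    # Host, credentials and the datastore path are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.org',                  # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder user
        'secret',                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # drives the "progress is N%" polls

    # Every *_Task SOAP method returns a Task managed-object reference.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] example-instance-dir')  # full "[datastore] path",
                                                   # so the optional datacenter
                                                   # argument is omitted here

    # wait_for_task() blocks, logging progress as the task runs, and raises
    # if vCenter reports the task as failed.
    task_info = session.wait_for_task(task)
    print(task_info.state)

The same cycle is what produces the repeated "Waiting for the task", "Task: {...} progress is N%" and "completed successfully" entries for CreateVM_Task, ReconfigVM_Task, PowerOffVM_Task and the other tasks in this section.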
[ 958.985928] env[65758]: WARNING openstack [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 958.986033] env[65758]: WARNING openstack [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 959.057242] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660797, 'name': CreateVM_Task, 'duration_secs': 0.683884} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.057489] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 959.058172] env[65758]: WARNING neutronclient.v2_0.client [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 959.058616] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.058799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.059119] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 959.061826] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c89d494-4de3-41c1-aff5-73fef8477cd0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.067515] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 959.067515] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a15864-79c5-7ade-6d00-235c44cce360" [ 959.067515] env[65758]: _type = "Task" [ 
959.067515] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.076151] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a15864-79c5-7ade-6d00-235c44cce360, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.134959] env[65758]: DEBUG nova.network.neutron [-] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 959.136800] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660801, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.296058} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.136800] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.140478] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5349554-fa24-4f12-a6c5-e66db7c4d47e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.150581] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.151476] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c384995-b810-488f-9473-c4b110f69c81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.155343] env[65758]: DEBUG nova.network.neutron [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updated VIF entry in instance network info cache for port 2e41907c-1553-48df-9644-cb422d2f19df. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 959.155679] env[65758]: DEBUG nova.network.neutron [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [{"id": "2e41907c-1553-48df-9644-cb422d2f19df", "address": "fa:16:3e:b2:e3:b9", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e41907c-15", "ovs_interfaceid": "2e41907c-1553-48df-9644-cb422d2f19df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 959.177073] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 33098961-060f-4503-a805-6ae7351b45ea/33098961-060f-4503-a805-6ae7351b45ea.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.182717] env[65758]: DEBUG oslo_concurrency.lockutils [req-e815e742-01c7-4ac3-8286-68e1c7b54323 req-0400e2aa-66af-43b4-be44-6316594e9a9d service nova] Releasing lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.186183] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e02a47ea-c5b5-4f36-b28c-51236b130362 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.201841] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 959.201841] env[65758]: value = "task-4660802" [ 959.201841] env[65758]: _type = "Task" [ 959.201841] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.221023] env[65758]: DEBUG oslo_vmware.api [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660778, 'name': ReconfigVM_Task, 'duration_secs': 5.801225} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.222582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.222863] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Reconfigured VM to detach interface {{(pid=65758) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 959.223480] env[65758]: WARNING neutronclient.v2_0.client [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 959.223849] env[65758]: WARNING neutronclient.v2_0.client [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 959.224640] env[65758]: WARNING openstack [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 959.225055] env[65758]: WARNING openstack [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 959.234021] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 959.234021] env[65758]: value = "task-4660803" [ 959.234021] env[65758]: _type = "Task" [ 959.234021] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.234869] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54619c2e-0a07-40d7-ae81-639cf96939aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.243052] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660802, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.260339] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640fe806-b9ba-4719-803a-9cddd7775832 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.264747] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660803, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.299312] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77fdbe3-612e-46f7-a887-29a97ff69737 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.310567] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e63b78f-12e2-48ba-8989-f47038715919 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.327435] env[65758]: DEBUG nova.compute.provider_tree [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.343914] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660800, 'name': ReconfigVM_Task, 'duration_secs': 0.93551} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.343914] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50/63b744d2-541a-42e3-9717-b06a4459fd50.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.345267] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9881318d-53bb-415d-836f-63f7d6107b96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.354010] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 959.354010] env[65758]: value = "task-4660804" [ 959.354010] env[65758]: _type = "Task" [ 959.354010] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.365143] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660804, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.390208] env[65758]: INFO nova.compute.manager [-] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Took 1.69 seconds to deallocate network for instance. [ 959.585473] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a15864-79c5-7ade-6d00-235c44cce360, 'name': SearchDatastore_Task, 'duration_secs': 0.045038} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.586298] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.586630] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.586924] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.587143] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.587468] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.587760] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aef9959b-6f08-404c-81ea-021a45dbdd5c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.600261] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 
tempest-ServerActionsTestOtherB-1402960202-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.600568] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.601450] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5bd36ef-d3f8-409a-a6b5-293b8fcea0c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.608781] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 959.608781] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b8c6a8-9b14-610d-ab4a-3d58c2f300e3" [ 959.608781] env[65758]: _type = "Task" [ 959.608781] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.619468] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b8c6a8-9b14-610d-ab4a-3d58c2f300e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.637853] env[65758]: INFO nova.compute.manager [-] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Took 1.73 seconds to deallocate network for instance. [ 959.720464] env[65758]: DEBUG nova.compute.manager [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 959.730026] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660802, 'name': PowerOffVM_Task, 'duration_secs': 0.292562} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.730768] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 959.745860] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 959.746118] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 959.746273] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 959.746558] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 959.747227] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 959.747227] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 959.747227] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} 
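The nova.virt.hardware trace that starts above and continues immediately below is computing CPU topologies for the 1-vCPU m1.nano flavor: with no flavor or image limits (0:0:0) the maxima default to 65536 sockets/cores/threads, every sockets x cores x threads combination whose product equals the vCPU count is enumerated, and only 1:1:1 survives. The snippet below re-implements that enumeration in plain Python purely for illustration; it is not Nova's code, just the arithmetic the log is tracing.

    # Illustrative re-implementation (not Nova's code) of the topology
    # enumeration traced in the surrounding log entries.
    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Return every topology whose product equals the vCPU count."""
        found = []
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            for cores in range(1, min(max_cores, vcpus) + 1):
                for threads in range(1, min(max_threads, vcpus) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(Topology(sockets, cores, threads))
        return found

    # For the 1-vCPU flavor in this run only one topology is possible,
    # matching "Got 1 possible topologies ... cores=1,sockets=1,threads=1".
    print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]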
[ 959.747227] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 959.747502] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 959.747539] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 959.747710] env[65758]: DEBUG nova.virt.hardware [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 959.750174] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb185d05-2bb7-4b68-bb0e-781f228a3cd3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.767815] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 959.767815] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 959.767815] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 959.767815] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 959.768337] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 959.768337] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 959.768541] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 959.768821] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 959.768821] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 959.768976] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 959.769174] env[65758]: DEBUG nova.virt.hardware [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 959.770107] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b4dd22-56df-436f-8b0f-1ab35f7b3ac9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.781025] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660803, 'name': ReconfigVM_Task, 'duration_secs': 0.324059} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.783856] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97c1a27-0b54-42a3-848b-026f84ebde0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.787799] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 33098961-060f-4503-a805-6ae7351b45ea/33098961-060f-4503-a805-6ae7351b45ea.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.792020] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1636983d-2b7c-4cf2-b926-33cdd4123c02 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.794101] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02637ffd-0a41-499b-a0da-f3bdf9b8882d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.806792] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.813106] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 959.814022] env[65758]: INFO nova.compute.manager [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Detaching volume 51ed0fd6-0a9d-417e-be08-c8c05d6bcc05 [ 959.817336] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.818163] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 959.818163] env[65758]: value = "task-4660805" [ 959.818163] env[65758]: _type = "Task" [ 959.818163] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.818163] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bef4b1b6-2eaa-45da-aec1-322a72016233 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.845901] env[65758]: DEBUG nova.scheduler.client.report [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.853472] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.864496] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Creating folder: Project (bb2a36b52e914a57b4894f1e83a62102). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 959.864496] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-274a9821-b125-4dc3-9a1c-ac30742fa7c5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.873765] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.873765] env[65758]: value = "task-4660806" [ 959.873765] env[65758]: _type = "Task" [ 959.873765] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.874336] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660805, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.882332] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660804, 'name': Rename_Task, 'duration_secs': 0.165299} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.884835] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.884835] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Created folder: Project (bb2a36b52e914a57b4894f1e83a62102) in parent group-v909763. [ 959.885012] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Creating folder: Instances. Parent ref: group-v909984. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 959.885543] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9166ea3-c55c-4172-a15b-51a79dbda8f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.887743] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed5a0c94-e370-487e-920f-96ae4307530d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.894352] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660806, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.898405] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.899708] env[65758]: INFO nova.virt.block_device [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Attempting to driver detach volume 51ed0fd6-0a9d-417e-be08-c8c05d6bcc05 from mountpoint /dev/sdb [ 959.899996] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 959.900275] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 959.901236] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75ec9f1-76a4-487a-8a6f-dda276f963a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.906164] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 959.906164] env[65758]: value = "task-4660809" [ 959.906164] env[65758]: _type = "Task" [ 959.906164] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.929015] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Created folder: Instances in parent group-v909984. [ 959.929374] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 959.930520] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.931360] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575e025e-fef6-4682-bb07-271d992bbb02 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.942217] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bf7f90d-ae0a-4d4a-916e-6de15fd4d4b4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.956801] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660809, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.966894] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b84a151-d5e1-46e2-b3b1-79f586a4f79c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.969915] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.969915] env[65758]: value = "task-4660811" [ 959.969915] env[65758]: _type = "Task" [ 959.969915] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.994906] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c96b98-34c8-4aad-bcbf-01a534ee869e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.004863] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660811, 'name': CreateVM_Task} progress is 15%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.017185] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The volume has not been displaced from its original location: [datastore2] volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05/volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05.vmdk. No consolidation needed. {{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 960.022303] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 960.023185] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb001176-0a14-49c3-83bf-93b0edacb9a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.045426] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 960.045426] env[65758]: value = "task-4660812" [ 960.045426] env[65758]: _type = "Task" [ 960.045426] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.056397] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660812, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.119758] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b8c6a8-9b14-610d-ab4a-3d58c2f300e3, 'name': SearchDatastore_Task, 'duration_secs': 0.013615} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.120641] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20ea2a38-51c1-4b33-b711-455737e0be80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.129929] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 960.129929] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a300ab-8815-8ae6-34f2-7ac26eff150c" [ 960.129929] env[65758]: _type = "Task" [ 960.129929] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.150322] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.150775] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a300ab-8815-8ae6-34f2-7ac26eff150c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.345071] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660805, 'name': Rename_Task, 'duration_secs': 0.171181} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.345330] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.345613] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e249f64-7ca2-4d9a-bb4c-71bb0993564b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.360896] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 960.360896] env[65758]: value = "task-4660813" [ 960.360896] env[65758]: _type = "Task" [ 960.360896] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.364699] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.365383] env[65758]: DEBUG nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 960.368692] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.329s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.371073] env[65758]: INFO nova.compute.claims [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.387249] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660813, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.396648] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660806, 'name': CreateVM_Task, 'duration_secs': 0.33066} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.396648] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.396648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.396648] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.397061] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 960.397379] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e2d75eb-88a2-458c-a5ea-40b0e5af35d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.405899] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 960.405899] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5252ba31-d810-6abb-1dd2-7e0b97633f98" [ 960.405899] env[65758]: _type = "Task" [ 960.405899] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.429039] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5252ba31-d810-6abb-1dd2-7e0b97633f98, 'name': SearchDatastore_Task, 'duration_secs': 0.014432} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.434570] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.434964] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.435352] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.435579] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.435849] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.436850] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660809, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.437066] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2b3b8f2-74f0-4a7c-a333-dfba07b3e56c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.450408] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.450737] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.451924] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d982277-4d37-435c-91a1-f879fa3c9588 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.461588] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 960.461588] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52262d7d-3fe2-a710-45f3-869032b42af2" [ 960.461588] env[65758]: _type = "Task" [ 960.461588] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.477042] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52262d7d-3fe2-a710-45f3-869032b42af2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.490501] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660811, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.557752] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660812, 'name': ReconfigVM_Task, 'duration_secs': 0.290763} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.558157] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 960.563344] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78a21dff-23ad-45ac-a299-043529e8bd64 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.580544] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 960.580544] env[65758]: value = "task-4660814" [ 960.580544] env[65758]: _type = "Task" [ 960.580544] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.590500] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.590690] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.590874] env[65758]: DEBUG nova.network.neutron [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 960.592222] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660814, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.646024] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a300ab-8815-8ae6-34f2-7ac26eff150c, 'name': SearchDatastore_Task, 'duration_secs': 0.013235} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.648509] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.648614] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 76ec31e6-65c2-4290-9ec0-b274be95baa4/76ec31e6-65c2-4290-9ec0-b274be95baa4.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.649035] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43c57acf-76e1-4084-9b4f-2d9db8a08e9a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.659136] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 960.659136] env[65758]: value = "task-4660815" [ 960.659136] env[65758]: _type = "Task" [ 960.659136] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.670794] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660815, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.840693] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.841039] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.841467] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.841467] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.841595] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.847029] env[65758]: INFO nova.compute.manager [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Terminating instance [ 960.881586] env[65758]: DEBUG nova.compute.utils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 960.890026] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660813, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.890026] env[65758]: DEBUG nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 960.890026] env[65758]: DEBUG nova.network.neutron [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 960.890026] env[65758]: WARNING neutronclient.v2_0.client [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 960.890026] env[65758]: WARNING neutronclient.v2_0.client [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 960.890026] env[65758]: WARNING openstack [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 960.890446] env[65758]: WARNING openstack [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 960.926505] env[65758]: DEBUG oslo_vmware.api [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660809, 'name': PowerOnVM_Task, 'duration_secs': 0.633485} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.926505] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 960.926782] env[65758]: INFO nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Took 10.59 seconds to spawn the instance on the hypervisor. 
[ 960.926942] env[65758]: DEBUG nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 960.928328] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9677be1b-d1d0-4aff-a1b5-fbff0f85572a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.979636] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52262d7d-3fe2-a710-45f3-869032b42af2, 'name': SearchDatastore_Task, 'duration_secs': 0.014963} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.984314] env[65758]: DEBUG nova.policy [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd07b5ba2c3ef430293fbf39148961763', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bad3e3c7054c424a800cb12e9c5dbb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 960.991673] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-effe7db0-6057-49fa-a495-aac351353588 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.999974] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 960.999974] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5255f411-f78d-fd07-ae98-104b48040d0f" [ 960.999974] env[65758]: _type = "Task" [ 960.999974] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.004373] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660811, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.020052] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5255f411-f78d-fd07-ae98-104b48040d0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.046280] env[65758]: DEBUG nova.compute.manager [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Received event network-vif-deleted-31402f5e-3e8a-4ff8-a2b3-4b5992fb142a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 961.046489] env[65758]: DEBUG nova.compute.manager [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received event network-vif-deleted-ffa44f11-41da-49d5-af63-8c9328cd2c67 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 961.046647] env[65758]: INFO nova.compute.manager [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Neutron deleted interface ffa44f11-41da-49d5-af63-8c9328cd2c67; detaching it from the instance and deleting it from the info cache [ 961.046960] env[65758]: DEBUG nova.network.neutron [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [{"id": "cdcc66de-e599-4e26-8757-617493c55e00", "address": "fa:16:3e:6f:f2:e7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcc66de-e5", "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 961.092647] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660814, 'name': ReconfigVM_Task, 'duration_secs': 0.25576} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.093388] env[65758]: WARNING neutronclient.v2_0.client [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 961.094496] env[65758]: WARNING openstack [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 961.095891] env[65758]: WARNING openstack [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 961.104256] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 961.172290] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660815, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.351079] env[65758]: DEBUG nova.compute.manager [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 961.351325] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 961.352367] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10223816-2383-4258-83f4-9e23dd38f094 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.361819] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 961.368831] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8f17096-7689-4705-a724-4faf04f66a3e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.379223] env[65758]: DEBUG oslo_vmware.api [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660813, 'name': PowerOnVM_Task, 'duration_secs': 0.591511} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.379223] env[65758]: DEBUG nova.network.neutron [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Successfully created port: ee605185-7c6b-4822-9ed0-b866f77e3500 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 961.382780] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.383021] env[65758]: INFO nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Took 8.45 seconds to spawn the instance on the hypervisor. 
[ 961.383309] env[65758]: DEBUG nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 961.383748] env[65758]: DEBUG oslo_vmware.api [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 961.383748] env[65758]: value = "task-4660816" [ 961.383748] env[65758]: _type = "Task" [ 961.383748] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.384861] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0082a875-6f3f-4372-abc9-12114e324d65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.390498] env[65758]: DEBUG nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 961.409764] env[65758]: DEBUG oslo_vmware.api [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660816, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.459155] env[65758]: INFO nova.compute.manager [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Took 36.06 seconds to build instance. [ 961.493984] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660811, 'name': CreateVM_Task, 'duration_secs': 1.381143} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.494536] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.495013] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.495186] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.495544] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.495833] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294bfc5f-981b-4f39-ae57-bc50bae29192 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.503950] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 961.503950] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52acb43b-117c-f9ce-9c11-f985d643eea8" [ 961.503950] env[65758]: _type = "Task" [ 961.503950] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.520249] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52acb43b-117c-f9ce-9c11-f985d643eea8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.524228] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5255f411-f78d-fd07-ae98-104b48040d0f, 'name': SearchDatastore_Task, 'duration_secs': 0.02692} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.531421] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.531699] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e/d5d27a5c-afe4-49a1-a385-0a8f625b5a1e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 961.532600] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f14cfe43-723a-4ba3-8645-c86a50b7a9b3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.540628] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 961.540628] env[65758]: value = "task-4660817" [ 961.540628] env[65758]: _type = "Task" [ 961.540628] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.550153] env[65758]: DEBUG oslo_concurrency.lockutils [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] Acquiring lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.556838] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660817, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.674663] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660815, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.751056} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.674805] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 76ec31e6-65c2-4290-9ec0-b274be95baa4/76ec31e6-65c2-4290-9ec0-b274be95baa4.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.676409] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.676409] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a58506af-9233-43a3-9563-f5581d99132f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.685986] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 961.685986] env[65758]: value = "task-4660818" [ 961.685986] env[65758]: _type = "Task" [ 961.685986] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.707938] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660818, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.903838] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e2d3d2-66ce-444a-93d1-53d2b75ba79b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.903838] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d7574c-e2c0-46fb-a491-cd789822c7bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.930450] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58ca07f-ccd1-4c20-a234-9257bd9fdbf7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.936644] env[65758]: INFO nova.compute.manager [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Took 28.69 seconds to build instance. [ 961.946957] env[65758]: DEBUG oslo_vmware.api [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660816, 'name': PowerOffVM_Task, 'duration_secs': 0.304041} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.950400] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 961.950400] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 961.950763] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8aef2333-4250-487d-bc67-9dbe9605a5aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.953853] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81382d9b-6691-4621-ab53-1f12457111a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.961421] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90598c64-a003-4624-8d4d-9e718035d0c7 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.577s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.972872] env[65758]: DEBUG nova.compute.provider_tree [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.017164] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52acb43b-117c-f9ce-9c11-f985d643eea8, 'name': SearchDatastore_Task, 'duration_secs': 0.036513} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.020699] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.021034] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.021324] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.021492] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.021677] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.023025] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41787292-c401-484b-81e0-eab6e89de733 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.043471] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.043471] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.050217] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76ecaa25-0fe3-4a16-b388-04cd6d4a2252 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.053045] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.053045] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.053340] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleting the datastore file [datastore2] ba3153f2-8e6f-469c-8730-957c5eebe97b {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.053951] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60f01d9f-40bd-4493-88d0-dd4ebefdb3fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.064697] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660817, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.068731] env[65758]: DEBUG oslo_vmware.api [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 962.068731] env[65758]: value = "task-4660820" [ 962.068731] env[65758]: _type = "Task" [ 962.068731] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.068731] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 962.068731] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e7bc49-83be-7df5-9ae8-d614695d6401" [ 962.068731] env[65758]: _type = "Task" [ 962.068731] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.082212] env[65758]: DEBUG oslo_vmware.api [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660820, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.086345] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e7bc49-83be-7df5-9ae8-d614695d6401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.195829] env[65758]: WARNING neutronclient.v2_0.client [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 962.196883] env[65758]: WARNING openstack [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 962.197439] env[65758]: WARNING openstack [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 962.207448] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.208562] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd1c8f05-e533-4d2b-85ce-4a8cfabf9ad7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.217239] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660818, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068442} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.219446] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.220134] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 962.220134] env[65758]: value = "task-4660821" [ 962.220134] env[65758]: _type = "Task" [ 962.220134] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.221184] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc81e636-cae3-4982-9be8-864e01978b7e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.252045] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 76ec31e6-65c2-4290-9ec0-b274be95baa4/76ec31e6-65c2-4290-9ec0-b274be95baa4.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.256783] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25deb307-cfe1-4361-8295-23ae5e5cf1eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.274968] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 962.274968] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 962.275188] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 962.276733] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e74e9b-99c1-48fe-809f-aa5858797730 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.283632] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 962.283632] env[65758]: value = "task-4660822" [ 962.283632] env[65758]: _type = "Task" [ 962.283632] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.304512] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1da70b-bf0b-4a38-a629-835cbd616639 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.313971] env[65758]: WARNING nova.virt.vmwareapi.driver [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 962.314561] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.318786] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0d1433-da10-423f-b557-a2caaf1c8998 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.322196] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660822, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.328903] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.329364] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b2e7320-0d28-49f0-82a2-9519d063d5f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.415994] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.415994] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.415994] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleting the datastore file [datastore1] df46c28d-7cbd-490e-8db2-9730e4d9f953 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.416276] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f15b5bed-72b5-4a05-8f3f-bd5842076d49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.423910] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 962.423910] env[65758]: value = "task-4660824" [ 962.423910] env[65758]: _type = "Task" [ 962.423910] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.429442] env[65758]: DEBUG nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 962.437570] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660824, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.440212] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a41bf3-42bb-4836-9e9a-085e87f469d5 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "33098961-060f-4503-a805-6ae7351b45ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 30.205s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.457529] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=<?>,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-21T13:11:36Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 962.457868] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 962.458073] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 962.458269] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 962.458412] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 962.458548] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 962.458788] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.458914] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 962.459107] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 962.459265] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 962.459431] env[65758]: DEBUG nova.virt.hardware [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 962.460507] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6255d3d-2709-4d7d-9bad-adf9aef2be5b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.470441] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40744610-0ef7-4e90-b4a0-c34a4fdcc2dd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.475694] env[65758]: DEBUG nova.scheduler.client.report [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.550375] env[65758]: INFO nova.network.neutron [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Port ffa44f11-41da-49d5-af63-8c9328cd2c67 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 962.550915] env[65758]: DEBUG nova.network.neutron [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [{"id": "cdcc66de-e599-4e26-8757-617493c55e00", "address": "fa:16:3e:6f:f2:e7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcc66de-e5", "ovs_interfaceid": "cdcc66de-e599-4e26-8757-617493c55e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 962.559413] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660817, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.764763} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.559689] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e/d5d27a5c-afe4-49a1-a385-0a8f625b5a1e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.559914] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.560219] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea39c136-c406-4109-8675-5358e2a98db8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.569480] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 962.569480] env[65758]: value = "task-4660825" [ 962.569480] env[65758]: _type = "Task" [ 962.569480] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.590981] env[65758]: DEBUG oslo_vmware.api [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660820, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.591295] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e7bc49-83be-7df5-9ae8-d614695d6401, 'name': SearchDatastore_Task, 'duration_secs': 0.064157} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.595252] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660825, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.595594] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdbcabeb-0cad-4099-a884-186bf4410c3a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.602239] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 962.602239] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc9e68-2402-21ec-4e31-9799479837dc" [ 962.602239] env[65758]: _type = "Task" [ 962.602239] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.614325] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc9e68-2402-21ec-4e31-9799479837dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.812151] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660822, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.937710] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660824, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.991035] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.991852] env[65758]: DEBUG nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 962.994979] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.184s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.995303] env[65758]: DEBUG nova.objects.instance [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lazy-loading 'resources' on Instance uuid 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.054917] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.080118] env[65758]: DEBUG oslo_vmware.api [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.584781} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.083146] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.083389] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.083688] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.083851] env[65758]: INFO nova.compute.manager [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Took 1.73 seconds to destroy the instance on the hypervisor. [ 963.084077] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 963.084704] env[65758]: DEBUG nova.compute.manager [-] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 963.084763] env[65758]: DEBUG nova.network.neutron [-] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 963.085587] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 963.086159] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 963.086200] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 963.095492] env[65758]: DEBUG nova.network.neutron [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Successfully updated port: ee605185-7c6b-4822-9ed0-b866f77e3500 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 963.102777] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081542} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.103779] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.107709] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87263f3-2992-4d6c-8316-d2af59c28e79 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.128233] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fc9e68-2402-21ec-4e31-9799479837dc, 'name': SearchDatastore_Task, 'duration_secs': 0.016955} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.136250] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e/d5d27a5c-afe4-49a1-a385-0a8f625b5a1e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.136982] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.136982] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] fcb795c2-dd13-458a-a71e-1c9e4fdc5e06/fcb795c2-dd13-458a-a71e-1c9e4fdc5e06.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.137528] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1403600-22b5-49fa-9341-401cc06923a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.154019] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 963.155830] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f0f1cb8-5fb6-4ecb-bad6-8311d351e810 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.166711] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 963.166711] env[65758]: value = "task-4660827" [ 963.166711] env[65758]: _type = "Task" [ 963.166711] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.168201] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 963.168201] env[65758]: value = "task-4660826" [ 963.168201] env[65758]: _type = "Task" [ 963.168201] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.180653] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660827, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.184669] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.317071] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660822, 'name': ReconfigVM_Task, 'duration_secs': 0.926818} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.317439] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 76ec31e6-65c2-4290-9ec0-b274be95baa4/76ec31e6-65c2-4290-9ec0-b274be95baa4.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.318173] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a82cc18-c754-4627-9fcf-98782c86f186 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.327291] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 963.327291] env[65758]: value = "task-4660828" [ 963.327291] env[65758]: _type = "Task" [ 963.327291] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.342859] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660828, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.440657] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660824, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.524268} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.441331] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.441773] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.441866] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.499377] env[65758]: DEBUG nova.compute.utils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 963.505195] env[65758]: DEBUG nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 963.506691] env[65758]: DEBUG nova.network.neutron [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 963.506691] env[65758]: WARNING neutronclient.v2_0.client [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 963.506691] env[65758]: WARNING neutronclient.v2_0.client [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 963.507190] env[65758]: WARNING openstack [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 963.507779] env[65758]: WARNING openstack [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 963.558617] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eefa441b-0f01-4f64-bb2f-17f8a2eb7407 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-ba3153f2-8e6f-469c-8730-957c5eebe97b-ffa44f11-41da-49d5-af63-8c9328cd2c67" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 11.033s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.599172] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.599664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.600553] env[65758]: DEBUG nova.network.neutron [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 963.625209] env[65758]: DEBUG nova.policy [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '742a9f6633b54c6f8cd432ac94b59e25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e3a324879d646699f950687546ea861', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 963.685429] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660826, 'name': CopyVirtualDisk_Task} progress is 51%.
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.690540] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660827, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.701780] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "33098961-060f-4503-a805-6ae7351b45ea" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.701978] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "33098961-060f-4503-a805-6ae7351b45ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.703366] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "33098961-060f-4503-a805-6ae7351b45ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.703366] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "33098961-060f-4503-a805-6ae7351b45ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.703366] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "33098961-060f-4503-a805-6ae7351b45ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.706174] env[65758]: INFO nova.compute.manager [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Terminating instance [ 963.850334] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660828, 'name': Rename_Task, 'duration_secs': 0.16954} completed successfully.
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.851020] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.851020] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2458d563-9752-4da7-8864-424f361fcd49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.862507] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 963.862507] env[65758]: value = "task-4660829" [ 963.862507] env[65758]: _type = "Task" [ 963.862507] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.873888] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.949991] env[65758]: INFO nova.virt.block_device [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Booting with volume 51ed0fd6-0a9d-417e-be08-c8c05d6bcc05 at /dev/sdb [ 963.989096] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "6981b99e-8e9f-459a-b356-9ed726c268ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.989478] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "6981b99e-8e9f-459a-b356-9ed726c268ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.005788] env[65758]: DEBUG nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Start building block device mappings for instance.
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 964.010750] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bdfd79f-4ddb-45c2-9761-62c177a2924a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.841049] env[65758]: DEBUG nova.network.neutron [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Successfully created port: 8fc2eeef-dace-49f2-99aa-448810fbaa32 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 964.852220] env[65758]: WARNING openstack [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 964.852639] env[65758]: WARNING openstack [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 964.860288] env[65758]: DEBUG nova.compute.manager [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 964.860519] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.860832] env[65758]: DEBUG nova.network.neutron [-] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 964.862262] env[65758]: DEBUG nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 964.869010] env[65758]: DEBUG nova.compute.manager [req-9709b5c4-9313-4142-a474-7c453e73bf4f req-8c813c84-59d3-4c6e-af1c-dc060bb0289f service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Received event network-vif-plugged-ee605185-7c6b-4822-9ed0-b866f77e3500 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 964.869010] env[65758]: DEBUG oslo_concurrency.lockutils [req-9709b5c4-9313-4142-a474-7c453e73bf4f req-8c813c84-59d3-4c6e-af1c-dc060bb0289f service nova] Acquiring lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.869010] env[65758]: DEBUG oslo_concurrency.lockutils [req-9709b5c4-9313-4142-a474-7c453e73bf4f req-8c813c84-59d3-4c6e-af1c-dc060bb0289f service nova] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.869010] env[65758]: DEBUG oslo_concurrency.lockutils [req-9709b5c4-9313-4142-a474-7c453e73bf4f req-8c813c84-59d3-4c6e-af1c-dc060bb0289f service nova] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.869010] env[65758]: DEBUG nova.compute.manager [req-9709b5c4-9313-4142-a474-7c453e73bf4f req-8c813c84-59d3-4c6e-af1c-dc060bb0289f service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] No waiting events found dispatching network-vif-plugged-ee605185-7c6b-4822-9ed0-b866f77e3500 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 964.869010] env[65758]: WARNING nova.compute.manager [req-9709b5c4-9313-4142-a474-7c453e73bf4f req-8c813c84-59d3-4c6e-af1c-dc060bb0289f service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Received unexpected event network-vif-plugged-ee605185-7c6b-4822-9ed0-b866f77e3500 for instance with vm_state building and task_state spawning.
[ 964.871434] env[65758]: DEBUG nova.compute.manager [req-9fb99742-703a-42ba-9b2c-d9f431e4c4c2 req-77b5cf6b-dbfe-4f09-a478-de4dc505f01c service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Received event network-vif-deleted-cdcc66de-e599-4e26-8757-617493c55e00 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 964.871606] env[65758]: INFO nova.compute.manager [req-9fb99742-703a-42ba-9b2c-d9f431e4c4c2 req-77b5cf6b-dbfe-4f09-a478-de4dc505f01c service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Neutron deleted interface cdcc66de-e599-4e26-8757-617493c55e00; detaching it from the instance and deleting it from the info cache [ 964.871944] env[65758]: DEBUG nova.network.neutron [req-9fb99742-703a-42ba-9b2c-d9f431e4c4c2 req-77b5cf6b-dbfe-4f09-a478-de4dc505f01c service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 964.882259] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b241db52-3ef7-4432-a54c-43f2bc2fb8e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.899822] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Acquiring lock "0ce11868-fee2-40d3-9433-7bc398a1f756" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.899822] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "0ce11868-fee2-40d3-9433-7bc398a1f756" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.907444] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdc8346-7ef7-4363-a73e-434c50f59650 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.918225] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660827, 'name': ReconfigVM_Task, 'duration_secs': 0.686276} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.918479] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660826, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612352} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.925874] env[65758]: DEBUG nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 964.931515] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Reconfigured VM instance instance-0000004e to attach disk [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e/d5d27a5c-afe4-49a1-a385-0a8f625b5a1e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.932298] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] fcb795c2-dd13-458a-a71e-1c9e4fdc5e06/fcb795c2-dd13-458a-a71e-1c9e4fdc5e06.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 964.932518] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 964.933092] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660829, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.933406] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.934567] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ace2d0b0-a9fd-4382-acd2-3699c99999c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.936628] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90992e20-f1f8-4472-805e-6effa465da9b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.938525] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1db8e90a-8b0c-4df6-86ad-aeb7a312e27b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.941586] env[65758]: DEBUG nova.network.neutron [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 964.975519] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-790ed588-02bb-45eb-baf5-150de8a45f15 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.979081] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 964.979081] env[65758]: value = "task-4660831" [ 964.979081] env[65758]: _type = "Task" [ 964.979081] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.979478] env[65758]: DEBUG oslo_vmware.api [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 964.979478] env[65758]: value = "task-4660830" [ 964.979478] env[65758]: _type = "Task" [ 964.979478] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.979655] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 964.979655] env[65758]: value = "task-4660832" [ 964.979655] env[65758]: _type = "Task" [ 964.979655] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.000671] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdd2490-4390-412d-8374-b02c9652ed34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.028040] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660831, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.028787] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e35d72c-57e6-423b-bc7a-d1b940bbbe93 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.032223] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660832, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.032551] env[65758]: DEBUG oslo_vmware.api [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660830, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.045018] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643bc1de-3909-4d75-8f92-8859516a3d3c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.064942] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e62d59d-3fce-4fef-93d8-7c1fb5e015b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.096028] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6014ea3a-3ec0-4663-b8d4-87c4c527cec4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.103598] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7323e7ed-bc37-4fe6-8c35-2b7807d27628 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.112337] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a5449e-81ce-4e6c-ad5e-2abb0e9adcbf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.129720] env[65758]: DEBUG nova.compute.provider_tree [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.136071] env[65758]: DEBUG nova.virt.block_device 
[None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updating existing volume attachment record: fbd53bc9-dfec-4489-b67e-515e1a648db7 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 965.180029] env[65758]: WARNING neutronclient.v2_0.client [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 965.180839] env[65758]: WARNING openstack [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 965.181295] env[65758]: WARNING openstack [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 965.350511] env[65758]: DEBUG nova.network.neutron [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Updating instance_info_cache with network_info: [{"id": "ee605185-7c6b-4822-9ed0-b866f77e3500", "address": "fa:16:3e:2a:e9:a3", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee605185-7c", "ovs_interfaceid": "ee605185-7c6b-4822-9ed0-b866f77e3500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 965.376524] env[65758]: DEBUG oslo_concurrency.lockutils [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.376838] env[65758]: DEBUG oslo_concurrency.lockutils [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.378932] env[65758]: DEBUG nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 965.380652] env[65758]: INFO nova.compute.manager [-] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Took 2.30 seconds to deallocate network for instance. [ 965.380922] env[65758]: DEBUG oslo_vmware.api [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660829, 'name': PowerOnVM_Task, 'duration_secs': 1.177654} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.387018] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.387018] env[65758]: INFO nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Took 10.37 seconds to spawn the instance on the hypervisor. 
[ 965.387018] env[65758]: DEBUG nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 965.398268] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c22d861-c43b-4d59-b204-8c9ee5967699 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.402751] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a7b2c89-e9e7-4280-8273-94f743b98d81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.418614] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8053f516-c0f2-4b52-ae8e-6ad40f1cf4d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.433516] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ba4f15129aca470d270c9d42919ad798',container_format='bare',created_at=2025-11-21T13:19:48Z,direct_url=,disk_format='vmdk',id=fe1141ac-cd89-43cf-a723-116931d6815e,min_disk=1,min_ram=0,name='tempest-test-snap-962754732',owner='3e3a324879d646699f950687546ea861',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-11-21T13:20:03Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 965.433766] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 965.433914] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 965.434101] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 965.434242] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 965.434386] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 965.434735] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.434735] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 965.434877] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 965.435128] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 965.435310] env[65758]: DEBUG nova.virt.hardware [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 965.436512] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.440206] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cac7f49-5138-4453-a7ce-32798e5357d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.452854] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a894dbc-293e-4f32-b362-c92c1397a845 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.472087] env[65758]: DEBUG nova.compute.manager [req-9fb99742-703a-42ba-9b2c-d9f431e4c4c2 req-77b5cf6b-dbfe-4f09-a478-de4dc505f01c service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Detach interface failed, port_id=cdcc66de-e599-4e26-8757-617493c55e00, reason: Instance ba3153f2-8e6f-469c-8730-957c5eebe97b could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 965.472769] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.498372] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660832, 'name': Rename_Task, 'duration_secs': 0.250607} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.504861] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.505326] env[65758]: DEBUG oslo_vmware.api [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660830, 'name': PowerOffVM_Task, 'duration_secs': 0.253126} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.505580] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660831, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110112} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.505821] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-160882a4-2ac8-4720-a9b3-2fd915c9f8ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.507854] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.508068] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.508398] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.508668] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e824e1a-bb1e-4408-bb33-ff02d0f626aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.511167] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba3aa78-5b50-4a71-8b7c-e5622c6029df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.534900] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] fcb795c2-dd13-458a-a71e-1c9e4fdc5e06/fcb795c2-dd13-458a-a71e-1c9e4fdc5e06.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.536517] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9862da82-0f22-4497-b0cb-cfc701c0a5d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.552411] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 965.552411] env[65758]: value = "task-4660834" [ 965.552411] env[65758]: _type = "Task" [ 965.552411] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.559518] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 965.559518] env[65758]: value = "task-4660835" [ 965.559518] env[65758]: _type = "Task" [ 965.559518] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.566769] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660834, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.573037] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660835, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.585854] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.586279] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.586603] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleting the datastore file [datastore2] 33098961-060f-4503-a805-6ae7351b45ea {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.587086] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d27a6f1b-a16a-467f-a9b3-e97949912001 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.596026] env[65758]: DEBUG oslo_vmware.api [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for the task: (returnval){ [ 965.596026] env[65758]: value = "task-4660836" [ 965.596026] env[65758]: _type = "Task" [ 965.596026] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.604797] env[65758]: DEBUG oslo_vmware.api [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660836, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.639096] env[65758]: DEBUG nova.scheduler.client.report [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 965.854041] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.854657] env[65758]: DEBUG nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Instance network_info: |[{"id": "ee605185-7c6b-4822-9ed0-b866f77e3500", "address": "fa:16:3e:2a:e9:a3", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee605185-7c", "ovs_interfaceid": "ee605185-7c6b-4822-9ed0-b866f77e3500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 965.855184] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:e9:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee605185-7c6b-4822-9ed0-b866f77e3500', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.863229] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d 
tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 965.863499] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.863746] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48115169-c2fe-4d2f-8711-a64291f4f977 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.885889] env[65758]: INFO nova.compute.manager [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Detaching volume fbd302a4-8737-4848-94c0-7cfc81983fb5 [ 965.889329] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.889329] env[65758]: value = "task-4660837" [ 965.889329] env[65758]: _type = "Task" [ 965.889329] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.905089] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.905407] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660837, 'name': CreateVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.928344] env[65758]: INFO nova.compute.manager [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Took 31.40 seconds to build instance. [ 965.938054] env[65758]: INFO nova.virt.block_device [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Attempting to driver detach volume fbd302a4-8737-4848-94c0-7cfc81983fb5 from mountpoint /dev/sdb [ 965.938054] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 965.938054] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909962', 'volume_id': 'fbd302a4-8737-4848-94c0-7cfc81983fb5', 'name': 'volume-fbd302a4-8737-4848-94c0-7cfc81983fb5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f5911fb-785e-444c-9408-c6884e06c5d3', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbd302a4-8737-4848-94c0-7cfc81983fb5', 'serial': 'fbd302a4-8737-4848-94c0-7cfc81983fb5'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 965.938054] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7160084-1c4a-4298-a43c-8d1b4f19258d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.964997] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e26703-18fd-4878-a2fc-a68aa6c4689b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.976835] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffdb2f0-3086-4114-91f6-fa1ceee74f4a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.003323] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf1d4f4-519e-4367-a10a-6ee927b8fcec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.023066] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The volume has not been displaced from its original location: [datastore2] volume-fbd302a4-8737-4848-94c0-7cfc81983fb5/volume-fbd302a4-8737-4848-94c0-7cfc81983fb5.vmdk. No consolidation needed. 
{{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 966.030131] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Reconfiguring VM instance instance-0000003f to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 966.030561] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2d789c9-dacb-43e3-b8c1-4ac7e53d6ac1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.052985] env[65758]: DEBUG oslo_vmware.api [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 966.052985] env[65758]: value = "task-4660838" [ 966.052985] env[65758]: _type = "Task" [ 966.052985] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.073152] env[65758]: DEBUG oslo_vmware.api [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660838, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.084959] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660834, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.089794] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660835, 'name': ReconfigVM_Task, 'duration_secs': 0.42223} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.090308] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Reconfigured VM instance instance-00000052 to attach disk [datastore1] fcb795c2-dd13-458a-a71e-1c9e4fdc5e06/fcb795c2-dd13-458a-a71e-1c9e4fdc5e06.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.090907] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc912383-f9d1-4420-8465-a3fa5a8a7bdf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.102679] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 966.102679] env[65758]: value = "task-4660839" [ 966.102679] env[65758]: _type = "Task" [ 966.102679] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.111033] env[65758]: DEBUG oslo_vmware.api [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Task: {'id': task-4660836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194819} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.111822] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.112049] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.112277] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.112478] env[65758]: INFO nova.compute.manager [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Took 1.25 seconds to destroy the instance on the hypervisor. [ 966.112774] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 966.113017] env[65758]: DEBUG nova.compute.manager [-] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 966.113167] env[65758]: DEBUG nova.network.neutron [-] [instance: 33098961-060f-4503-a805-6ae7351b45ea] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 966.113520] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 966.114266] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 966.114574] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 966.128482] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660839, 'name': Rename_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.146667] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.152s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.149088] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.329s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.164538] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 966.177282] env[65758]: INFO nova.scheduler.client.report [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Deleted allocations for instance 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720 [ 966.402605] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660837, 'name': CreateVM_Task, 'duration_secs': 0.486858} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.403053] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 966.403246] env[65758]: WARNING neutronclient.v2_0.client [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 966.403644] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.403943] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.404141] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 966.404458] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1aafbca-e653-4cfb-aa4a-2c1abd2d5a58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.410292] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 966.410292] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527a96e7-cd79-9590-b8f5-aedb1d96a771" [ 966.410292] env[65758]: _type = "Task" [ 966.410292] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.421468] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527a96e7-cd79-9590-b8f5-aedb1d96a771, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.431344] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fbab0c1e-9b85-47ad-a8a5-d4004e1595d3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.924s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.568202] env[65758]: DEBUG oslo_vmware.api [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660838, 'name': ReconfigVM_Task, 'duration_secs': 0.257743} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.571658] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Reconfigured VM instance instance-0000003f to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 966.577309] env[65758]: DEBUG oslo_vmware.api [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660834, 'name': PowerOnVM_Task, 'duration_secs': 0.604419} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.579570] env[65758]: DEBUG nova.network.neutron [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Successfully updated port: 8fc2eeef-dace-49f2-99aa-448810fbaa32 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 966.580648] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd7df16b-b3b2-4cbb-91b0-e6ed3f90d96c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.591233] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.592056] env[65758]: DEBUG nova.compute.manager [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 966.592725] env[65758]: DEBUG nova.compute.manager [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Received event network-changed-ee605185-7c6b-4822-9ed0-b866f77e3500 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 966.592901] env[65758]: DEBUG nova.compute.manager [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Refreshing instance network info cache due to event network-changed-ee605185-7c6b-4822-9ed0-b866f77e3500. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 966.593126] env[65758]: DEBUG oslo_concurrency.lockutils [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] Acquiring lock "refresh_cache-e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.593263] env[65758]: DEBUG oslo_concurrency.lockutils [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] Acquired lock "refresh_cache-e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.593447] env[65758]: DEBUG nova.network.neutron [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Refreshing network info cache for port ee605185-7c6b-4822-9ed0-b866f77e3500 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 966.594934] env[65758]: INFO nova.compute.manager [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Rebuilding instance [ 966.598686] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "refresh_cache-a6ed7451-7b59-4ed9-8fb7-871d6107a272" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.598841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "refresh_cache-a6ed7451-7b59-4ed9-8fb7-871d6107a272" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.598995] env[65758]: DEBUG nova.network.neutron [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 966.601043] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35542177-68ea-48f3-ac4b-eb4de4a53eec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.612535] env[65758]: DEBUG oslo_vmware.api [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 966.612535] env[65758]: value = "task-4660840" [ 966.612535] env[65758]: _type = "Task" [ 966.612535] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.617706] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660839, 'name': Rename_Task, 'duration_secs': 0.158051} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.626261] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.627428] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ff78ddb-06ea-420c-bc69-c03977b1c632 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.643867] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 966.643867] env[65758]: value = "task-4660841" [ 966.643867] env[65758]: _type = "Task" [ 966.643867] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.644706] env[65758]: DEBUG oslo_vmware.api [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660840, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.655859] env[65758]: INFO nova.compute.claims [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.667189] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660841, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.670375] env[65758]: DEBUG nova.compute.manager [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 966.671324] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fbce51-852d-4cc1-9907-6dfeb33336bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.686765] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f01678a3-6703-45f3-8385-b50e92755c6d tempest-SecurityGroupsTestJSON-1901456647 tempest-SecurityGroupsTestJSON-1901456647-project-member] Lock "7c0e6911-4f85-4b47-a7e9-84d0e3bb5720" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.887s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.922946] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527a96e7-cd79-9590-b8f5-aedb1d96a771, 'name': SearchDatastore_Task, 'duration_secs': 0.014463} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.923322] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.923582] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.923839] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.924032] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.924153] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.924453] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9273465-c12f-427a-b123-a29a8e98806a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.936663] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.936875] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.937725] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca027ce5-e1bb-4216-943e-b81bdf660ae7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.946354] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 966.946354] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525bb464-9a4b-e3b9-b85d-c476a55e693b" [ 966.946354] env[65758]: _type = "Task" [ 966.946354] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.955463] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525bb464-9a4b-e3b9-b85d-c476a55e693b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.976122] env[65758]: DEBUG nova.compute.manager [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Received event network-vif-plugged-8fc2eeef-dace-49f2-99aa-448810fbaa32 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 966.977027] env[65758]: DEBUG oslo_concurrency.lockutils [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Acquiring lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.977027] env[65758]: DEBUG oslo_concurrency.lockutils [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.977027] env[65758]: DEBUG oslo_concurrency.lockutils [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.977027] env[65758]: DEBUG nova.compute.manager [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] No waiting events found dispatching network-vif-plugged-8fc2eeef-dace-49f2-99aa-448810fbaa32 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 966.977291] env[65758]: WARNING nova.compute.manager [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Received unexpected event network-vif-plugged-8fc2eeef-dace-49f2-99aa-448810fbaa32 for instance with vm_state building and task_state spawning. [ 966.977291] env[65758]: DEBUG nova.compute.manager [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Received event network-changed-8fc2eeef-dace-49f2-99aa-448810fbaa32 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 966.977845] env[65758]: DEBUG nova.compute.manager [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Refreshing instance network info cache due to event network-changed-8fc2eeef-dace-49f2-99aa-448810fbaa32. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 966.977845] env[65758]: DEBUG oslo_concurrency.lockutils [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Acquiring lock "refresh_cache-a6ed7451-7b59-4ed9-8fb7-871d6107a272" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.005303] env[65758]: DEBUG nova.network.neutron [-] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 967.101630] env[65758]: WARNING neutronclient.v2_0.client [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 967.103086] env[65758]: WARNING openstack [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 967.103086] env[65758]: WARNING openstack [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 967.114835] env[65758]: WARNING openstack [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 967.114835] env[65758]: WARNING openstack [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 967.136122] env[65758]: DEBUG oslo_vmware.api [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660840, 'name': ReconfigVM_Task, 'duration_secs': 0.221083} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.138523] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909962', 'volume_id': 'fbd302a4-8737-4848-94c0-7cfc81983fb5', 'name': 'volume-fbd302a4-8737-4848-94c0-7cfc81983fb5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f5911fb-785e-444c-9408-c6884e06c5d3', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbd302a4-8737-4848-94c0-7cfc81983fb5', 'serial': 'fbd302a4-8737-4848-94c0-7cfc81983fb5'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 967.141424] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.156523] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660841, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.172451] env[65758]: INFO nova.compute.resource_tracker [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating resource usage from migration fbb0ee87-076d-4bf3-b98e-480be784f44a [ 967.193766] env[65758]: DEBUG nova.network.neutron [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 967.278367] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 967.279128] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 967.279128] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 967.279128] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 967.279336] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 967.279594] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 967.279811] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 967.279994] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 967.280492] env[65758]: DEBUG 
nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 967.280630] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 967.280804] env[65758]: DEBUG nova.virt.hardware [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 967.281931] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba50c0c-836c-4a3c-8e23-b093250938eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.296613] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101d935f-a560-4855-9df3-8c68312f1306 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.322190] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:36:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f597ff54-9371-4703-893c-3b7ad96d394d', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.330174] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 967.333598] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 967.334226] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbe73543-a639-4925-a49b-d2fc7a5382f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.360702] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.360702] env[65758]: value = "task-4660842" [ 967.360702] env[65758]: _type = "Task" [ 967.360702] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.371384] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660842, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.452403] env[65758]: WARNING neutronclient.v2_0.client [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 967.453102] env[65758]: WARNING openstack [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 967.453555] env[65758]: WARNING openstack [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 967.474174] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525bb464-9a4b-e3b9-b85d-c476a55e693b, 'name': SearchDatastore_Task, 'duration_secs': 0.018409} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.475100] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83fbf12a-667e-49c7-833e-b816e115bba6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.485590] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 967.485590] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d4e13f-f241-5c1b-6110-619018f4e459" [ 967.485590] env[65758]: _type = "Task" [ 967.485590] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.491415] env[65758]: WARNING neutronclient.v2_0.client [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 967.492174] env[65758]: WARNING openstack [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 967.492545] env[65758]: WARNING openstack [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 967.506883] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d4e13f-f241-5c1b-6110-619018f4e459, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.508592] env[65758]: INFO nova.compute.manager [-] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Took 1.40 seconds to deallocate network for instance. [ 967.556489] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.557808] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.557808] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.557808] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.557808] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.562990] env[65758]: INFO nova.compute.manager [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Terminating instance [ 967.627309] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b26d19-33c3-4045-8ea7-203fd0642b61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.642705] env[65758]: DEBUG nova.network.neutron [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Updating instance_info_cache with network_info: [{"id": "8fc2eeef-dace-49f2-99aa-448810fbaa32", "address": "fa:16:3e:34:fe:78", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fc2eeef-da", "ovs_interfaceid": "8fc2eeef-dace-49f2-99aa-448810fbaa32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 967.648427] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3fc501-1500-4df7-8f30-d6d86afd304c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.661017] env[65758]: DEBUG oslo_vmware.api [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660841, 'name': PowerOnVM_Task, 'duration_secs': 0.550534} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.690737] env[65758]: DEBUG nova.network.neutron [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Updated VIF entry in instance network info cache for port ee605185-7c6b-4822-9ed0-b866f77e3500. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 967.690737] env[65758]: DEBUG nova.network.neutron [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Updating instance_info_cache with network_info: [{"id": "ee605185-7c6b-4822-9ed0-b866f77e3500", "address": "fa:16:3e:2a:e9:a3", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee605185-7c", "ovs_interfaceid": "ee605185-7c6b-4822-9ed0-b866f77e3500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 967.691159] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 967.691975] env[65758]: INFO nova.compute.manager [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Took 7.97 seconds to spawn the instance on the hypervisor. 
[ 967.691975] env[65758]: DEBUG nova.compute.manager [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 967.693198] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.694295] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec01570-a244-4958-952d-ef7178416700 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.697623] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2db5af-7bd1-434c-80bb-5443ec9ca09c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.700959] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-691ed5cd-47f3-4a5c-b56d-7ab2226c2da4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.712849] env[65758]: DEBUG nova.objects.instance [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.722115] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 967.722115] env[65758]: value = "task-4660843" [ 967.722115] env[65758]: _type = "Task" [ 967.722115] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.722115] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee44334-f79c-45e5-9e82-17fccde1e141 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.737958] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.748441] env[65758]: DEBUG nova.compute.provider_tree [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.880169] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660842, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.999683] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d4e13f-f241-5c1b-6110-619018f4e459, 'name': SearchDatastore_Task, 'duration_secs': 0.023686} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.000112] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.000436] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a/e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 968.000713] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38f76835-299a-482d-b8f9-c21c4f34278a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.008826] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 968.008826] env[65758]: value = "task-4660844" [ 968.008826] env[65758]: _type = "Task" [ 968.008826] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.015312] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.019799] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660844, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.074475] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "refresh_cache-d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.074475] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquired lock "refresh_cache-d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.074475] env[65758]: DEBUG nova.network.neutron [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 968.150249] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "refresh_cache-a6ed7451-7b59-4ed9-8fb7-871d6107a272" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.150654] env[65758]: DEBUG nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Instance network_info: |[{"id": "8fc2eeef-dace-49f2-99aa-448810fbaa32", "address": "fa:16:3e:34:fe:78", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fc2eeef-da", "ovs_interfaceid": "8fc2eeef-dace-49f2-99aa-448810fbaa32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 968.151151] env[65758]: DEBUG oslo_concurrency.lockutils [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Acquired lock "refresh_cache-a6ed7451-7b59-4ed9-8fb7-871d6107a272" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.151351] env[65758]: DEBUG nova.network.neutron [req-0d182852-6307-4848-98d1-3e59fa1a8eec 
req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Refreshing network info cache for port 8fc2eeef-dace-49f2-99aa-448810fbaa32 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 968.152971] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:fe:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fc2eeef-dace-49f2-99aa-448810fbaa32', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 968.161945] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 968.163491] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 968.163887] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e87b3456-fb82-41ee-beb7-e350956e384c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.188026] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 968.188026] env[65758]: value = "task-4660845" [ 968.188026] env[65758]: _type = "Task" [ 968.188026] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.194297] env[65758]: DEBUG oslo_concurrency.lockutils [req-ffba32ea-81fa-4e07-96d7-eb1832b9aa3e req-040f22c0-bcbd-4ea4-8707-63ce148d9c9f service nova] Releasing lock "refresh_cache-e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.201627] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660845, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.235942] env[65758]: INFO nova.compute.manager [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Took 29.00 seconds to build instance. [ 968.246440] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660843, 'name': PowerOffVM_Task, 'duration_secs': 0.284537} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.248165] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.248165] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.248378] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83d957d-4dd9-447e-8754-b5fd1b03b5ff {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.255915] env[65758]: DEBUG nova.scheduler.client.report [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.269042] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.269042] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc909190-e721-4676-a804-d21dd8851166 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.370688] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 968.371698] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 968.371698] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleting the datastore file [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695 {{(pid=65758) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.372897] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-571e7d67-2bcf-402e-a7d1-edb0aa0de305 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.382260] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660842, 'name': CreateVM_Task, 'duration_secs': 0.78341} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.383226] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.384061] env[65758]: WARNING neutronclient.v2_0.client [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 968.384901] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.385276] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.386309] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 968.387374] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96992b94-d141-4d59-b3d6-8c3a3aa1dee0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.394262] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 968.394262] env[65758]: value = "task-4660847" [ 968.394262] env[65758]: _type = "Task" [ 968.394262] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.399994] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 968.399994] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5231cfba-1f5b-b3e5-d245-28a5a5e38c89" [ 968.399994] env[65758]: _type = "Task" [ 968.399994] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.407583] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.415015] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5231cfba-1f5b-b3e5-d245-28a5a5e38c89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.524453] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660844, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.583054] env[65758]: WARNING neutronclient.v2_0.client [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 968.583054] env[65758]: WARNING openstack [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 968.583054] env[65758]: WARNING openstack [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 968.622057] env[65758]: DEBUG nova.network.neutron [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 968.663789] env[65758]: WARNING neutronclient.v2_0.client [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 968.667049] env[65758]: WARNING openstack [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 968.667049] env[65758]: WARNING openstack [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 968.699905] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660845, 'name': CreateVM_Task, 'duration_secs': 0.509157} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.700575] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.701166] env[65758]: WARNING neutronclient.v2_0.client [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
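The entries above and below repeat one pattern: nova submits a vCenter task (CreateVM_Task, SearchDatastore_Task, DeleteDatastoreFile_Task), and oslo.vmware then polls it, logging "progress is N%" until it reports "completed successfully". The following is a minimal sketch of that poll-until-done loop, assuming a generic get_task_info callable rather than the real vSphere API; it is not the oslo.vmware implementation.

```python
import time

# Minimal poll-until-done sketch of the "progress is N%" loop seen in the log.
# `get_task_info` is a stand-in callable, not the real vSphere property read.

def wait_for_task(get_task_info, poll_interval=0.5, timeout=60.0):
    """Poll a task dict until it reaches a terminal state; return its result."""
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()  # e.g. {'id': 'task-1', 'state': 'running', 'progress': 51}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete in time')
        print(f"Task {info['id']} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)

# Toy usage: a fake task that succeeds on the third poll.
states = iter([
    {'id': 'task-1', 'state': 'running', 'progress': 0},
    {'id': 'task-1', 'state': 'running', 'progress': 51},
    {'id': 'task-1', 'state': 'success', 'result': 'ok'},
])
print(wait_for_task(lambda: next(states), poll_interval=0.01))
```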
[ 968.701843] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.701919] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.702348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 968.702551] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f39a8628-61dd-40de-ac95-652bda133757 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.708722] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 968.708722] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a176d6-dfc6-3a7d-6ec7-59e2be059b1c" [ 968.708722] env[65758]: _type = "Task" [ 968.708722] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.717739] env[65758]: DEBUG nova.network.neutron [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 968.727551] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a176d6-dfc6-3a7d-6ec7-59e2be059b1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.738283] env[65758]: DEBUG oslo_concurrency.lockutils [None req-78dacb53-2f11-4410-b040-a19c5464a24e tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.361s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.739801] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2956ed33-e135-4fcb-b2b2-85ae398384d1 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.512s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.761529] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.612s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.762028] env[65758]: INFO nova.compute.manager [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Migrating [ 968.770552] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.872s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.770915] env[65758]: DEBUG nova.objects.instance [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lazy-loading 'resources' on Instance uuid e93528eb-33d0-46d1-94e8-d1d66f2c682f {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.919509] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.927857] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5231cfba-1f5b-b3e5-d245-28a5a5e38c89, 'name': SearchDatastore_Task, 'duration_secs': 0.077853} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.931361] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.931361] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 968.931697] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.932048] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.932048] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.933086] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-554dbfaa-8d8b-4098-92f6-11cc0244b2ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.944403] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.944639] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 968.945624] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-005ec819-432f-4dd4-8d20-1eb9b6eb68e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.952494] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 968.952494] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52eda68b-d6a3-ebb1-abdb-52312f95f5ed" [ 968.952494] env[65758]: _type = "Task" [ 968.952494] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.966147] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52eda68b-d6a3-ebb1-abdb-52312f95f5ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.008462] env[65758]: WARNING neutronclient.v2_0.client [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 969.009303] env[65758]: WARNING openstack [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 969.009722] env[65758]: WARNING openstack [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 969.030284] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643023} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.030576] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a/e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 969.030787] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 969.031077] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19941b07-7f74-463a-a535-2e39275f1929 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.043682] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 969.043682] env[65758]: value = "task-4660848" [ 969.043682] env[65758]: _type = "Task" [ 969.043682] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.056806] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660848, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.116080] env[65758]: DEBUG nova.compute.manager [req-259855d8-d64e-447f-abad-5d4cd0ec04a1 req-962118e3-f4f9-4b1c-b5e5-07033755a21f service nova] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Received event network-vif-deleted-2bcc5488-41d1-43a9-8b40-17b8081f4a31 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 969.156229] env[65758]: DEBUG nova.network.neutron [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Updated VIF entry in instance network info cache for port 8fc2eeef-dace-49f2-99aa-448810fbaa32. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 969.158291] env[65758]: DEBUG nova.network.neutron [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Updating instance_info_cache with network_info: [{"id": "8fc2eeef-dace-49f2-99aa-448810fbaa32", "address": "fa:16:3e:34:fe:78", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fc2eeef-da", "ovs_interfaceid": "8fc2eeef-dace-49f2-99aa-448810fbaa32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 969.208172] env[65758]: DEBUG nova.compute.manager [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Received event network-changed-2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 969.208517] env[65758]: DEBUG nova.compute.manager [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Refreshing instance network info cache due to event network-changed-2e41907c-1553-48df-9644-cb422d2f19df. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 969.209255] env[65758]: DEBUG oslo_concurrency.lockutils [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] Acquiring lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.209255] env[65758]: DEBUG oslo_concurrency.lockutils [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] Acquired lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.209486] env[65758]: DEBUG nova.network.neutron [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Refreshing network info cache for port 2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 969.227759] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Releasing lock "refresh_cache-d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.228317] env[65758]: DEBUG nova.compute.manager [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 969.228521] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 969.229496] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6494b19-5938-4c44-9fc6-564c91cb8597 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.232419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.232641] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Processing image fe1141ac-cd89-43cf-a723-116931d6815e {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.232905] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e/fe1141ac-cd89-43cf-a723-116931d6815e.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.233010] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e/fe1141ac-cd89-43cf-a723-116931d6815e.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.233191] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.233815] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46739e60-d56d-4fd3-99fe-f5014ddb3806 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.241683] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 969.241995] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-552825ba-6b6b-4bdd-934a-0dc66e8b87a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.250561] env[65758]: DEBUG oslo_vmware.api [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 969.250561] env[65758]: value = "task-4660849" [ 969.250561] env[65758]: _type = "Task" [ 969.250561] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.255666] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.255829] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.260468] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f89f7b-f44f-4508-ab04-34a5b147a391 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.267906] env[65758]: DEBUG oslo_vmware.api [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660849, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.272633] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 969.272633] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5231d2e3-19fe-9556-8600-50a509c21373" [ 969.272633] env[65758]: _type = "Task" [ 969.272633] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.288027] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5231d2e3-19fe-9556-8600-50a509c21373, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.288027] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.288027] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.288027] env[65758]: DEBUG nova.network.neutron [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 969.408730] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.552376} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.409039] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.409248] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.410225] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.470740] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52eda68b-d6a3-ebb1-abdb-52312f95f5ed, 'name': SearchDatastore_Task, 'duration_secs': 0.012072} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.470740] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58df782c-5b1f-4fbf-bee5-3db1cdfa7586 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.476180] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 969.476180] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c088aa-6ab6-5528-afde-deb8caee0c74" [ 969.476180] env[65758]: _type = "Task" [ 969.476180] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.494476] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c088aa-6ab6-5528-afde-deb8caee0c74, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.540316] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.540572] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.540746] env[65758]: DEBUG nova.compute.manager [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 969.541854] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad261ee-d0b8-4b06-a81b-38d2db6230eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.554454] env[65758]: DEBUG nova.compute.manager [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 969.555109] env[65758]: DEBUG nova.objects.instance [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.562828] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660848, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080208} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.563650] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.564980] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d61f7c-10a5-4894-b1b6-636c2e3e4b76 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.593893] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a/e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.597593] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-632bad98-331d-4867-a9c7-12671dc48276 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.611920] env[65758]: DEBUG nova.compute.manager [None req-b8a8461e-3908-455e-bff4-88e5c127e901 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 969.614014] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012af775-361b-4863-b1ff-aad5b5779777 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.624653] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 969.624653] env[65758]: value = "task-4660850" [ 969.624653] env[65758]: _type = "Task" [ 969.624653] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.639038] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660850, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.663963] env[65758]: DEBUG oslo_concurrency.lockutils [req-0d182852-6307-4848-98d1-3e59fa1a8eec req-2d75d4e3-89c4-4961-b48d-ad6138ebb1f8 service nova] Releasing lock "refresh_cache-a6ed7451-7b59-4ed9-8fb7-871d6107a272" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.715589] env[65758]: WARNING neutronclient.v2_0.client [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 969.716094] env[65758]: WARNING openstack [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 969.718741] env[65758]: WARNING openstack [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 969.764600] env[65758]: DEBUG oslo_vmware.api [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660849, 'name': PowerOffVM_Task, 'duration_secs': 0.269614} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.765897] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff1f8f8-2d9f-41fd-801c-e5f458000040 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.768588] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.768779] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.769071] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-437c9392-1abb-4b08-9360-30f2e82ed681 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.787891] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ce8cb3-97f5-40e8-8fe1-23b202544679 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.801598] env[65758]: WARNING neutronclient.v2_0.client [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
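The teardown traced in the surrounding entries follows a fixed order on the VMware driver: the VM is powered off, unregistered from vCenter, and only then are its files removed from the datastore. The sketch below only illustrates that ordering under those assumptions; destroy_instance and the three callables are hypothetical stand-ins, not nova's actual vmops helpers.

```python
# Ordering sketch only: power off, unregister, then delete files. The callables
# are hypothetical stand-ins for PowerOffVM_Task, UnregisterVM and
# DeleteDatastoreFile_Task; this is not nova's vmops code.

def destroy_instance(instance_id, power_off, unregister, delete_files):
    power_off(instance_id)     # the VM must be off before its disks are touched
    unregister(instance_id)    # remove it from the vCenter inventory
    delete_files(instance_id)  # then reclaim the instance directory on the datastore

destroy_instance(
    'd5d27a5c-afe4-49a1-a385-0a8f625b5a1e',
    power_off=lambda i: print(f"Powered off the VM {i}"),
    unregister=lambda i: print(f"Unregistered the VM {i}"),
    delete_files=lambda i: print(f"Deleted contents of {i} from the datastore"),
)
```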
[ 969.802745] env[65758]: WARNING openstack [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 969.803430] env[65758]: WARNING openstack [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 969.818232] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Preparing fetch location {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 969.818882] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Fetch image to [datastore2] OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696/OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696.vmdk {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 969.819011] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Downloading stream optimized image fe1141ac-cd89-43cf-a723-116931d6815e to [datastore2] OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696/OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696.vmdk on the data store datastore2 as vApp {{(pid=65758) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 969.820027] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Downloading image file data fe1141ac-cd89-43cf-a723-116931d6815e to the ESX as VM named 'OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696' {{(pid=65758) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 969.822510] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.822978] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.823064] env[65758]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Deleting the datastore file [datastore1] d5d27a5c-afe4-49a1-a385-0a8f625b5a1e {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.859069] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64f2468f-2fb8-4831-b35b-34ba49f047d0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.865650] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4042ce1-3707-473b-8961-581f5caeb7b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.899755] env[65758]: DEBUG oslo_vmware.api [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for the task: (returnval){ [ 969.899755] env[65758]: value = "task-4660852" [ 969.899755] env[65758]: _type = "Task" [ 969.899755] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.900452] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51098d4d-6e6d-470d-8807-107d7d58defd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.914277] env[65758]: DEBUG oslo_vmware.api [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660852, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.930611] env[65758]: DEBUG nova.compute.provider_tree [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.977290] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 969.977290] env[65758]: value = "resgroup-9" [ 969.977290] env[65758]: _type = "ResourcePool" [ 969.977290] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 969.979290] env[65758]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-cff7c5cc-6dad-4678-b744-cd8b3262bf6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.018221] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c088aa-6ab6-5528-afde-deb8caee0c74, 'name': SearchDatastore_Task, 'duration_secs': 0.021174} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.020091] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.020441] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] df46c28d-7cbd-490e-8db2-9730e4d9f953/df46c28d-7cbd-490e-8db2-9730e4d9f953.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 970.020812] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lease: (returnval){ [ 970.020812] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245f9a4-7a0a-bcf1-bb94-6d40e147c4d8" [ 970.020812] env[65758]: _type = "HttpNfcLease" [ 970.020812] env[65758]: } obtained for vApp import into resource pool (val){ [ 970.020812] env[65758]: value = "resgroup-9" [ 970.020812] env[65758]: _type = "ResourcePool" [ 970.020812] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 970.021088] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the lease: (returnval){ [ 970.021088] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245f9a4-7a0a-bcf1-bb94-6d40e147c4d8" [ 970.021088] env[65758]: _type = "HttpNfcLease" [ 970.021088] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 970.021324] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8adbf5f8-89b5-4052-bb7c-384a3d2dd18e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.042655] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 970.042655] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245f9a4-7a0a-bcf1-bb94-6d40e147c4d8" [ 970.042655] env[65758]: _type = "HttpNfcLease" [ 970.042655] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 970.044493] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 970.044493] env[65758]: value = "task-4660854" [ 970.044493] env[65758]: _type = "Task" [ 970.044493] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.058279] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.061069] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquiring lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.061478] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.061689] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquiring lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.061965] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.062221] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.066265] env[65758]: INFO nova.compute.manager [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Terminating instance [ 970.075035] env[65758]: WARNING neutronclient.v2_0.client [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
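The lockutils entries in this stretch record how long each caller waited for and then held a named lock ("acquired ... waited 0.001s", "released ... held 0.000s"). Below is a rough stand-in for that accounting using a plain threading.Lock rather than oslo_concurrency; timed_lock and the printed format are illustrative only.

```python
import threading
import time
from contextlib import contextmanager

# Rough stand-in for the waited/held accounting lockutils logs; uses a plain
# threading.Lock registry, not oslo_concurrency.

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, caller):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    acquired_at = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - acquired_at
        print(f'Lock "{name}" released by "{caller}" :: held {held:.3f}s')

with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.01)
```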
[ 970.075164] env[65758]: WARNING openstack [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 970.075549] env[65758]: WARNING openstack [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 970.126369] env[65758]: INFO nova.compute.manager [None req-b8a8461e-3908-455e-bff4-88e5c127e901 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] instance snapshotting [ 970.127181] env[65758]: DEBUG nova.objects.instance [None req-b8a8461e-3908-455e-bff4-88e5c127e901 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lazy-loading 'flavor' on Instance uuid fcb795c2-dd13-458a-a71e-1c9e4fdc5e06 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.139762] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660850, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.191515] env[65758]: WARNING neutronclient.v2_0.client [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 970.192186] env[65758]: WARNING openstack [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 970.192842] env[65758]: WARNING openstack [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 970.242639] env[65758]: DEBUG nova.network.neutron [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updated VIF entry in instance network info cache for port 2e41907c-1553-48df-9644-cb422d2f19df. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 970.243013] env[65758]: DEBUG nova.network.neutron [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [{"id": "2e41907c-1553-48df-9644-cb422d2f19df", "address": "fa:16:3e:b2:e3:b9", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e41907c-15", "ovs_interfaceid": "2e41907c-1553-48df-9644-cb422d2f19df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 970.335095] env[65758]: DEBUG nova.network.neutron [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance_info_cache with network_info: [{"id": "1a0cdbf3-b230-4f89-999a-4886f142722c", "address": "fa:16:3e:a2:d0:8b", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0cdbf3-b2", "ovs_interfaceid": "1a0cdbf3-b230-4f89-999a-4886f142722c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 970.416675] env[65758]: DEBUG oslo_vmware.api [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Task: {'id': task-4660852, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157556} 
completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.416907] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.417101] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.417324] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.417444] env[65758]: INFO nova.compute.manager [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Took 1.19 seconds to destroy the instance on the hypervisor. [ 970.417740] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 970.417951] env[65758]: DEBUG nova.compute.manager [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 970.418148] env[65758]: DEBUG nova.network.neutron [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 970.418299] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 970.418861] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 970.419165] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 970.433184] env[65758]: DEBUG nova.scheduler.client.report [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 970.443523] env[65758]: DEBUG nova.network.neutron [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 970.443811] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 970.462322] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 970.462611] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 970.462765] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 970.462949] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 970.463161] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 970.463449] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 970.463779] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.463972] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 970.464172] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 
tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 970.464479] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 970.464741] env[65758]: DEBUG nova.virt.hardware [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 970.465985] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d45d257-cbcc-42b7-bdde-b77696791c96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.476861] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cdd1a5-f0c7-4f90-90ac-41c42f9ff6a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.494334] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:c9:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5efce30e-48dd-493a-a354-f562a8adf7af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30972d97-c096-41a5-b3bf-289b54c95d25', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.502105] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 970.502429] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.502681] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93f57845-0ae4-4f77-b1b9-8daee59aff8f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.525627] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.525627] env[65758]: value = "task-4660855" [ 970.525627] env[65758]: _type = "Task" [ 970.525627] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.537646] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 970.537646] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245f9a4-7a0a-bcf1-bb94-6d40e147c4d8" [ 970.537646] env[65758]: _type = "HttpNfcLease" [ 970.537646] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 970.544217] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660855, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.554678] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.570862] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.571678] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquiring lock "refresh_cache-fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.571878] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquired lock "refresh_cache-fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.572087] env[65758]: DEBUG nova.network.neutron [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 970.573354] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5ee5e45-c4a1-44c5-bc32-27d567c2a170 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.583246] env[65758]: DEBUG oslo_vmware.api [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 970.583246] env[65758]: value = "task-4660856" [ 970.583246] env[65758]: _type = "Task" [ 970.583246] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.597184] env[65758]: DEBUG oslo_vmware.api [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660856, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.643758] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e27ba1-1276-4ae7-988d-fb3c59fe61ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.654386] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660850, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.669905] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a6a846-a6c7-4444-8298-9a23f642fb16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.746131] env[65758]: DEBUG oslo_concurrency.lockutils [req-57f53d31-616b-4ecb-908a-146f8317d668 req-2a8c039f-e4d9-4611-998e-53bc3783647b service nova] Releasing lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.840465] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.939263] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.169s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.943406] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.792s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.943406] env[65758]: DEBUG nova.objects.instance [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lazy-loading 'resources' on Instance uuid 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.945861] env[65758]: DEBUG nova.network.neutron [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 970.968586] env[65758]: INFO nova.scheduler.client.report [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Deleted allocations for instance e93528eb-33d0-46d1-94e8-d1d66f2c682f [ 971.038121] env[65758]: DEBUG oslo_vmware.api 
[-] Lease: (returnval){ [ 971.038121] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245f9a4-7a0a-bcf1-bb94-6d40e147c4d8" [ 971.038121] env[65758]: _type = "HttpNfcLease" [ 971.038121] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 971.041446] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660855, 'name': CreateVM_Task, 'duration_secs': 0.444258} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.041567] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 971.042153] env[65758]: WARNING neutronclient.v2_0.client [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release.: NotImplementedError [ 971.042531] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.042730] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.043655] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 971.043655] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d87c3c66-5eff-4539-95ef-16e1f38962d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.054910] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.056556] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 971.056556] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5243184b-3b5f-0d39-2eb6-77616ad3a648" [ 971.056556] env[65758]: _type = "Task" [ 971.056556] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.066533] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5243184b-3b5f-0d39-2eb6-77616ad3a648, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.076483] env[65758]: WARNING neutronclient.v2_0.client [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 971.077526] env[65758]: WARNING openstack [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 971.077732] env[65758]: WARNING openstack [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 971.095471] env[65758]: DEBUG oslo_vmware.api [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660856, 'name': PowerOffVM_Task, 'duration_secs': 0.269298} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.096086] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.096244] env[65758]: DEBUG nova.compute.manager [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 971.097522] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4472630d-8996-4428-b105-23d7e8f9851c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.142412] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660850, 'name': ReconfigVM_Task, 'duration_secs': 1.476768} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.142412] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Reconfigured VM instance instance-00000053 to attach disk [datastore2] e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a/e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.142412] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c51e12fe-071d-47c7-8006-3fca0ec9c679 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.150306] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 971.150306] env[65758]: value = "task-4660857" [ 971.150306] env[65758]: _type = "Task" [ 971.150306] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.162832] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660857, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.182919] env[65758]: DEBUG nova.compute.manager [None req-b8a8461e-3908-455e-bff4-88e5c127e901 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Instance disappeared during snapshot {{(pid=65758) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4635}} [ 971.310210] env[65758]: DEBUG nova.network.neutron [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 971.346408] env[65758]: DEBUG nova.compute.manager [None req-b8a8461e-3908-455e-bff4-88e5c127e901 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Found 0 images (rotation: 2) {{(pid=65758) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5057}} [ 971.449763] env[65758]: INFO nova.compute.manager [-] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Took 1.03 seconds to deallocate network for instance. 
[ 971.485538] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fc3612b0-01ed-40d3-9b22-15d78dcf6552 tempest-ServersWithSpecificFlavorTestJSON-223687661 tempest-ServersWithSpecificFlavorTestJSON-223687661-project-member] Lock "e93528eb-33d0-46d1-94e8-d1d66f2c682f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.490s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.539010] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 971.539010] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245f9a4-7a0a-bcf1-bb94-6d40e147c4d8" [ 971.539010] env[65758]: _type = "HttpNfcLease" [ 971.539010] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 971.539480] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 971.539480] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5245f9a4-7a0a-bcf1-bb94-6d40e147c4d8" [ 971.539480] env[65758]: _type = "HttpNfcLease" [ 971.539480] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 971.540737] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f811aa8a-3ebc-4e47-a887-84f898798d1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.554998] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5246482d-12ef-8af3-ac37-4e4be99bb70e/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 971.555393] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5246482d-12ef-8af3-ac37-4e4be99bb70e/disk-0.vmdk. {{(pid=65758) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 971.569348] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660854, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.636371] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b7456cd-5c77-4ee3-9341-41c86ebd8097 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.096s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.648875] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5243184b-3b5f-0d39-2eb6-77616ad3a648, 'name': SearchDatastore_Task, 'duration_secs': 0.013232} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.648875] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.648875] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.648875] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.648875] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.648875] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 971.653406] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd33a201-423a-4407-a324-b64bf566a3bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.656949] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5d60d957-01f8-4c40-9844-311e73a1b077 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.676624] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660857, 'name': Rename_Task, 'duration_secs': 0.49963} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.678465] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 971.680481] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 971.680481] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 971.680481] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c400488-8373-48cc-bcd3-12975e052c81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.682363] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b489a09a-a2ca-424a-a25d-35fc0b2e0c63 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.692949] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 971.692949] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524da261-255b-9ea2-511a-5d6ae54d8843" [ 971.692949] env[65758]: _type = "Task" [ 971.692949] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.694794] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 971.694794] env[65758]: value = "task-4660858" [ 971.694794] env[65758]: _type = "Task" [ 971.694794] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.709895] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524da261-255b-9ea2-511a-5d6ae54d8843, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.711593] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660858, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.723094] env[65758]: DEBUG nova.network.neutron [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 971.927965] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522d4b5a-718b-4a39-a608-2fc608712398 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.937985] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2cf1a9-0fd9-48bd-b6aa-bd611d3c61f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.973828] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.978623] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80ddaa0-e5d5-4ba2-bcbe-15c014010d21 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.992146] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83797b6-71e1-488e-aef4-21a81c263167 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.009307] env[65758]: DEBUG nova.compute.provider_tree [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.063160] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660854, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.628476} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.063984] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] df46c28d-7cbd-490e-8db2-9730e4d9f953/df46c28d-7cbd-490e-8db2-9730e4d9f953.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.064436] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.064883] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b751eb08-0f8d-446c-8224-d765b7f04b16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.075513] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 972.075513] env[65758]: value = "task-4660859" [ 972.075513] env[65758]: _type = "Task" [ 972.075513] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.087065] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660859, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.209424] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524da261-255b-9ea2-511a-5d6ae54d8843, 'name': SearchDatastore_Task, 'duration_secs': 0.020528} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.220028] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a10ea1f6-dae6-4f85-af64-aadf8a010991 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.223417] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660858, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.227273] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Releasing lock "refresh_cache-fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.231030] env[65758]: DEBUG nova.compute.manager [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 972.231030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.231376] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95abb291-efa4-438e-85c4-0aacd7b54de5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.237902] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 972.237902] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52368cdc-01f2-57e0-9eb6-adf962028ef9" [ 972.237902] env[65758]: _type = "Task" [ 972.237902] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.246483] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.248043] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35254fa6-08a3-4ba8-81fa-56afa799c4f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.256563] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52368cdc-01f2-57e0-9eb6-adf962028ef9, 'name': SearchDatastore_Task, 'duration_secs': 0.014732} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.261023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.261467] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695/be3de9bd-da98-4c7e-ad7c-933245523695.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 972.262231] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 972.262231] env[65758]: value = "task-4660860" [ 972.262231] env[65758]: _type = "Task" [ 972.262231] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.262504] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cb1c81a-5e6d-4e82-bf7e-5b8ea43f14fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.281835] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660860, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.285593] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 972.285593] env[65758]: value = "task-4660861" [ 972.285593] env[65758]: _type = "Task" [ 972.285593] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.296906] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660861, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.367265] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0ecdf0-b075-4892-8085-6664ad55d7bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.399507] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance '3ff9192b-3956-49f6-afd2-827759826056' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 972.515862] env[65758]: DEBUG nova.scheduler.client.report [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.590367] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076968} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.593852] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.594351] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0fc3b6-ef81-4134-b1a2-4c84dfcc7dec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.622657] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] df46c28d-7cbd-490e-8db2-9730e4d9f953/df46c28d-7cbd-490e-8db2-9730e4d9f953.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.627018] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dffc20d1-084b-4cb4-aaad-c924b84d8a04 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.656919] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 972.656919] env[65758]: value = "task-4660862" [ 972.656919] env[65758]: _type = "Task" [ 972.656919] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.673147] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660862, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.673147] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Completed reading data from the image iterator. {{(pid=65758) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 972.673480] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5246482d-12ef-8af3-ac37-4e4be99bb70e/disk-0.vmdk. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 972.674489] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2beb6120-dd9d-4ed5-b229-2727e6259d05 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.684807] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5246482d-12ef-8af3-ac37-4e4be99bb70e/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 972.684807] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5246482d-12ef-8af3-ac37-4e4be99bb70e/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 972.684807] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d2f95624-33d7-4324-99ac-0e15a8d178b3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.712354] env[65758]: DEBUG oslo_vmware.api [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660858, 'name': PowerOnVM_Task, 'duration_secs': 0.708095} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.712665] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.712937] env[65758]: INFO nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Took 10.28 seconds to spawn the instance on the hypervisor. [ 972.713417] env[65758]: DEBUG nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 972.714482] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8711da7-ffc7-45be-a91b-fd33c84de8f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.781052] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660860, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.805148] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660861, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.908254] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.908696] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c237864a-4c2c-41e8-be34-3be632ce0294 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.923332] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 972.923332] env[65758]: value = "task-4660863" [ 972.923332] env[65758]: _type = "Task" [ 972.923332] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.938740] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660863, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.011667] env[65758]: DEBUG oslo_vmware.rw_handles [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5246482d-12ef-8af3-ac37-4e4be99bb70e/disk-0.vmdk. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 973.012157] env[65758]: INFO nova.virt.vmwareapi.images [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Downloaded image file data fe1141ac-cd89-43cf-a723-116931d6815e [ 973.012933] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea5758d-c599-4f07-970e-b401e8eb8005 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.032725] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.091s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.036471] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.600s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.038409] env[65758]: INFO nova.compute.claims [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.042755] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7304af75-43bf-4ac0-9a09-8f836c0d2505 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.072151] env[65758]: INFO nova.virt.vmwareapi.images [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] The imported VM was unregistered [ 973.076967] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Caching image {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 973.076967] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating directory with path [datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.077719] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5d4012b-fa09-4a7a-834b-5b8e3594b4a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.084977] env[65758]: INFO nova.scheduler.client.report [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 
tempest-ListServerFiltersTestJSON-514582576-project-member] Deleted allocations for instance 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3 [ 973.165302] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Created directory with path [datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.165689] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696/OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696.vmdk to [datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e/fe1141ac-cd89-43cf-a723-116931d6815e.vmdk. {{(pid=65758) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 973.167446] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-fa3fb518-a14a-41d6-8ad7-c3be5ab194f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.180955] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660862, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.184602] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 973.184602] env[65758]: value = "task-4660865" [ 973.184602] env[65758]: _type = "Task" [ 973.184602] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.199361] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660865, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.236863] env[65758]: INFO nova.compute.manager [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Took 27.07 seconds to build instance. [ 973.281048] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660860, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.300346] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660861, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.760103} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.300670] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695/be3de9bd-da98-4c7e-ad7c-933245523695.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 973.300881] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 973.301184] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39f889ac-b20b-498c-a60f-f869f76a1495 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.310678] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 973.310678] env[65758]: value = "task-4660866" [ 973.310678] env[65758]: _type = "Task" [ 973.310678] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.322795] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660866, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.441204] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660863, 'name': PowerOffVM_Task, 'duration_secs': 0.404577} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.441204] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.441204] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance '3ff9192b-3956-49f6-afd2-827759826056' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 973.599267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-37608026-d5e1-45bb-b187-f564fb78ab9f tempest-ListServerFiltersTestJSON-514582576 tempest-ListServerFiltersTestJSON-514582576-project-member] Lock "3a7d0c08-9de6-47f4-a0c3-871458ccc4e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.903s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.676564] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660862, 'name': ReconfigVM_Task, 'duration_secs': 0.529955} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.676564] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfigured VM instance instance-00000045 to attach disk [datastore2] df46c28d-7cbd-490e-8db2-9730e4d9f953/df46c28d-7cbd-490e-8db2-9730e4d9f953.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.677544] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_options': None, 'guest_format': None, 'encrypted': False, 'encryption_secret_uuid': None, 'device_type': 'disk', 'encryption_format': None, 'device_name': '/dev/sda', 'size': 0, 'disk_bus': None, 'boot_index': 0, 'image_id': '75a6399b-5100-4c51-b5cf-162bd505a28f'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'}, 'attachment_id': 
'fbd53bc9-dfec-4489-b67e-515e1a648db7', 'disk_bus': None, 'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=65758) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 973.677848] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Volume attach. Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 973.678089] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 973.678939] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc812c98-bfe7-45b4-b1a9-4a8ba1c39f3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.714888] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660865, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.714888] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131b7508-4289-40be-8f0a-99a15e7c8e64 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.746332] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05/volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.749167] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d3c53b9e-bb4b-4e79-b8e1-00855485641d tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.593s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.749266] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a32d51e-8beb-4f29-a4e3-06dde48b7b82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.791973] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660860, 'name': PowerOffVM_Task, 'duration_secs': 1.102607} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.791973] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.791973] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.791973] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 973.791973] env[65758]: value = "task-4660867" [ 973.791973] env[65758]: _type = "Task" [ 973.791973] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.791973] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d68a5c38-0bb8-4e60-a4eb-a4557525088d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.804884] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660867, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.826465] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0999} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.828447] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 973.828768] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.828985] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.829317] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Deleting the datastore file [datastore1] fcb795c2-dd13-458a-a71e-1c9e4fdc5e06 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.830125] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa74197-7bd9-43e6-a689-aca104354ed4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.833077] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6078f80-eda3-46ad-a797-abd643fb41ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.864690] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695/be3de9bd-da98-4c7e-ad7c-933245523695.vmdk or 
device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.866483] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-643277cb-6cf6-429e-bf4d-84d5d208ff92 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.887997] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for the task: (returnval){ [ 973.887997] env[65758]: value = "task-4660869" [ 973.887997] env[65758]: _type = "Task" [ 973.887997] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.902590] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 973.902590] env[65758]: value = "task-4660870" [ 973.902590] env[65758]: _type = "Task" [ 973.902590] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.902979] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.920570] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660870, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.951265] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 973.953024] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 973.953024] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 973.953024] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 973.953024] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 973.953024] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 973.953024] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 973.953024] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 973.953024] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 
tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 973.953715] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 973.953715] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 973.958658] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3d305de-6936-49aa-b109-9b05c79e91b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.982698] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 973.982698] env[65758]: value = "task-4660871" [ 973.982698] env[65758]: _type = "Task" [ 973.982698] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.995038] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660871, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.200795] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660865, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.303110] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660867, 'name': ReconfigVM_Task, 'duration_secs': 0.407749} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.303510] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfigured VM instance instance-00000045 to attach disk [datastore2] volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05/volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 974.311116] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9aac4c0-d16f-4ce2-b03e-ded70bfcd9ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.329088] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 974.329088] env[65758]: value = "task-4660872" [ 974.329088] env[65758]: _type = "Task" [ 974.329088] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.341038] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660872, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.399719] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.417817] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660870, 'name': ReconfigVM_Task, 'duration_secs': 0.393838} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.418130] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Reconfigured VM instance instance-0000004a to attach disk [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695/be3de9bd-da98-4c7e-ad7c-933245523695.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 974.418853] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7679b4e-b7b6-4ea5-a608-d6932fd8a7d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.428286] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 974.428286] env[65758]: value = "task-4660873" [ 974.428286] env[65758]: _type = "Task" [ 974.428286] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.439785] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660873, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.497914] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660871, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.528812] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7a32b4-78f6-44bc-a29d-389c790c8775 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.537539] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f9b512-7a5e-49a4-9727-d8512dc58c61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.576699] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80d63b4-b5f4-436e-8ddf-3043381f5d33 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.586178] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e207f470-6d54-4212-8ba2-0faf56e7aa84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.605262] env[65758]: DEBUG nova.objects.instance [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.607186] env[65758]: DEBUG nova.compute.provider_tree [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.700535] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660865, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.841158] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660872, 'name': ReconfigVM_Task, 'duration_secs': 0.18556} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.841613] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 974.842270] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-402c677a-59d9-4fa2-a81f-b78e70c1a486 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.852283] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 974.852283] env[65758]: value = "task-4660874" [ 974.852283] env[65758]: _type = "Task" [ 974.852283] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.866059] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660874, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.902027] env[65758]: DEBUG oslo_vmware.api [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Task: {'id': task-4660869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.653273} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.902027] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.902027] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.902027] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.902027] env[65758]: INFO nova.compute.manager [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Took 2.67 seconds to destroy the instance on the hypervisor. [ 974.902027] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 974.902368] env[65758]: DEBUG nova.compute.manager [-] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 974.902368] env[65758]: DEBUG nova.network.neutron [-] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 974.902459] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 974.902997] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 974.903296] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 974.939700] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660873, 'name': Rename_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.998023] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660871, 'name': ReconfigVM_Task, 'duration_secs': 0.592439} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.998023] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance '3ff9192b-3956-49f6-afd2-827759826056' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 975.113051] env[65758]: DEBUG nova.scheduler.client.report [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 975.116289] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.116519] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.116746] env[65758]: DEBUG nova.network.neutron [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 975.116928] env[65758]: DEBUG nova.objects.instance [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'info_cache' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.199710] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660865, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.267295] env[65758]: DEBUG nova.network.neutron [-] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 975.267295] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 975.366700] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660874, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.446709] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660873, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.508275] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 975.508275] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 975.508275] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 975.508275] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 975.508275] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 975.508275] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 
tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 975.508950] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 975.509260] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 975.509462] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 975.509635] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 975.509810] env[65758]: DEBUG nova.virt.hardware [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 975.515500] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Reconfiguring VM instance instance-0000004d to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 975.516345] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a2b45d0-44f4-4b15-99e7-83a31fc09aa4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.538022] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 975.538022] env[65758]: value = "task-4660875" [ 975.538022] env[65758]: _type = "Task" [ 975.538022] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.549774] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660875, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.604685] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.605029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.620114] env[65758]: DEBUG nova.objects.base [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Object Instance<7f5911fb-785e-444c-9408-c6884e06c5d3> lazy-loaded attributes: flavor,info_cache {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 975.622697] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.623419] env[65758]: DEBUG nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 975.626593] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.154s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.628247] env[65758]: INFO nova.compute.claims [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 975.703826] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660865, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.774640] env[65758]: DEBUG nova.network.neutron [-] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 975.866416] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660874, 'name': Rename_Task, 'duration_secs': 0.923557} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.867700] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 975.868272] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9a51b20-52b5-4340-ade1-966ab6f0b560 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.878755] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 975.878755] env[65758]: value = "task-4660876" [ 975.878755] env[65758]: _type = "Task" [ 975.878755] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.890313] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660876, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.941734] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660873, 'name': Rename_Task, 'duration_secs': 1.208147} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.942180] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 975.942466] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1da3baf1-cb45-466f-8ddf-5de2e62cde99 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.950683] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 975.950683] env[65758]: value = "task-4660877" [ 975.950683] env[65758]: _type = "Task" [ 975.950683] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.960852] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660877, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.049190] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660875, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.110428] env[65758]: DEBUG nova.compute.utils [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 976.127049] env[65758]: WARNING neutronclient.v2_0.client [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
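The interleaved "Task: {...} progress is N%" and "completed successfully" entries above come from a poll-until-done loop. A minimal, self-contained sketch of that pattern (illustrative only; the fetch_task_info callable is a hypothetical stand-in for the real vSphere TaskInfo lookup, not the oslo.vmware API):

# Illustrative sketch of the polling pattern behind the repeated
# "Task: {...} progress is N%" lines. NOT the oslo.vmware implementation;
# fetch_task_info() is a hypothetical stand-in for the TaskInfo lookup.
import time


def wait_for_task(fetch_task_info, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds or errors.

    fetch_task_info: callable returning a dict such as
        {'state': 'running', 'progress': 66},
        {'state': 'success'} or {'state': 'error', 'error': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        state = info.get('state')
        if state == 'success':
            return info                      # "completed successfully"
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Still queued/running: report progress and poll again.
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)
    raise TimeoutError('task did not complete in time')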
[ 976.128819] env[65758]: WARNING openstack [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 976.129445] env[65758]: WARNING openstack [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 976.144129] env[65758]: DEBUG nova.compute.utils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 976.149031] env[65758]: DEBUG nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 976.149354] env[65758]: DEBUG nova.network.neutron [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 976.149912] env[65758]: WARNING neutronclient.v2_0.client [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 976.150362] env[65758]: WARNING neutronclient.v2_0.client [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
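The "Acquiring lock / acquired ... waited / released ... held" triplets throughout this trace record how long each critical section waited for and then held its semaphore. A rough sketch of that bookkeeping, assuming plain threading locks rather than Nova's actual lock manager:

# Illustrative sketch (not Nova's code) of the timing behind the
# "Lock ... acquired ... waited 0.001s" / "released ... held 1.009s" lines.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


@contextmanager
def logged_lock(name, owner):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))


# Example usage mirroring the resource-tracker lines above:
# with logged_lock("compute_resources", "ResourceTracker.instance_claim"):
#     ...claim resources...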
[ 976.150996] env[65758]: WARNING openstack [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 976.151401] env[65758]: WARNING openstack [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 976.205071] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660865, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.921059} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.205395] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696/OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696.vmdk to [datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e/fe1141ac-cd89-43cf-a723-116931d6815e.vmdk. [ 976.205605] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Cleaning up location [datastore2] OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696 {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 976.205842] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_882b6f3f-08a6-40a8-b84c-735baf69c696 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.206058] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a57bb0a-c1fc-4a9f-926f-043d29774cdb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.216064] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 976.216064] env[65758]: value = "task-4660878" [ 976.216064] env[65758]: _type = "Task" [ 976.216064] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.227768] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660878, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.240249] env[65758]: DEBUG nova.policy [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8898aef8104bf582ec78e9c6a5ee2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8be788d761114dfca7244f953b571c7d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 976.278939] env[65758]: INFO nova.compute.manager [-] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Took 1.38 seconds to deallocate network for instance. [ 976.400648] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660876, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.467874] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660877, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.553185] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660875, 'name': ReconfigVM_Task, 'duration_secs': 0.564481} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.553541] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Reconfigured VM instance instance-0000004d to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 976.554403] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9acfe04-73fe-4e80-99eb-a24099201e76 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.579036] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 3ff9192b-3956-49f6-afd2-827759826056/3ff9192b-3956-49f6-afd2-827759826056.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.579413] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8f3b047-a17e-44c7-91c4-62d2336e5de2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.604146] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 976.604146] env[65758]: value = "task-4660879" [ 976.604146] env[65758]: _type = "Task" [ 976.604146] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.614594] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.615298] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660879, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.654258] env[65758]: DEBUG nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 976.694262] env[65758]: DEBUG nova.network.neutron [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Successfully created port: 91228388-8d10-417b-b79f-c45873d878e3 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 976.730311] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.055328} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.730910] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.731026] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e/fe1141ac-cd89-43cf-a723-116931d6815e.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.731304] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e/fe1141ac-cd89-43cf-a723-116931d6815e.vmdk to [datastore2] a6ed7451-7b59-4ed9-8fb7-871d6107a272/a6ed7451-7b59-4ed9-8fb7-871d6107a272.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.731629] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9ccbc4c-d070-48a9-8c92-7d536ddf2759 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.747548] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 976.747548] env[65758]: value = "task-4660880" [ 976.747548] env[65758]: _type = "Task" [ 976.747548] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.760254] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660880, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.791487] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.890828] env[65758]: DEBUG oslo_vmware.api [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660876, 'name': PowerOnVM_Task, 'duration_secs': 0.637486} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.891200] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.891348] env[65758]: DEBUG nova.compute.manager [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 976.892176] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c64263-2202-4ddb-843f-4983c2c8eb40 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.962636] env[65758]: DEBUG oslo_vmware.api [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660877, 'name': PowerOnVM_Task, 'duration_secs': 0.624772} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.965865] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.966177] env[65758]: DEBUG nova.compute.manager [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 976.967592] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85eaf77-28f9-4dcf-aec2-0d21f4b66d48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.026165] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101c6a89-39fd-4dce-a472-dab1a396e96b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.036256] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1265cb-54eb-497d-94eb-ce018e6b2b51 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.072641] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13eb7ade-0cc3-486f-bbd0-469eba7f9587 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.081695] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cceeb439-8299-4e1d-bb7f-c51f814c410f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.104160] env[65758]: DEBUG nova.compute.provider_tree [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.120649] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660879, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.259755] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660880, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.417769] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.486214] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.602661] env[65758]: DEBUG nova.scheduler.client.report [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 977.618061] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660879, 'name': ReconfigVM_Task, 'duration_secs': 0.966796} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.618381] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 3ff9192b-3956-49f6-afd2-827759826056/3ff9192b-3956-49f6-afd2-827759826056.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.618801] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance '3ff9192b-3956-49f6-afd2-827759826056' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 977.666533] env[65758]: DEBUG nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 977.698310] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 977.698581] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.698738] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 977.698923] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.699372] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 977.699585] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 977.699811] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 977.699975] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 977.700155] env[65758]: DEBUG nova.virt.hardware [None 
req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 977.700560] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 977.700809] env[65758]: DEBUG nova.virt.hardware [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 977.702300] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f359febf-0d55-4a1c-a8a5-1388a3538c47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.712717] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4542f9-7d2c-4bc3-9c73-c9e429b9d898 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.730055] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.730397] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.730659] env[65758]: INFO nova.compute.manager [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Attaching volume 0659d25b-87ba-47b1-be68-7ea7275165bb to /dev/sdb [ 977.762411] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660880, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.776272] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eae1cf7-fc1e-40d4-9237-391d32b17042 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.785937] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa910fba-1125-4560-a74a-84b412cccf43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.805171] env[65758]: DEBUG nova.virt.block_device [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Updating existing volume attachment record: 7f30f6aa-04a7-4414-8dca-d756c3f188dc {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 978.020109] env[65758]: WARNING neutronclient.v2_0.client [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 978.020931] env[65758]: WARNING openstack [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 978.021322] env[65758]: WARNING openstack [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 978.112540] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.112755] env[65758]: DEBUG nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 978.117164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.212s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.117422] env[65758]: DEBUG nova.objects.instance [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'resources' on Instance uuid ba3153f2-8e6f-469c-8730-957c5eebe97b {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 978.126757] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86900144-3ff7-4bdb-808f-bbdd2ebf527a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.156909] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3ffae7-f578-4f87-881d-3f54b7df5b4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.177270] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance '3ff9192b-3956-49f6-afd2-827759826056' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 978.266445] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660880, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.289671] env[65758]: DEBUG nova.network.neutron [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updating instance_info_cache with network_info: [{"id": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "address": "fa:16:3e:f0:a2:8e", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb28e5b-cb", "ovs_interfaceid": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 978.382265] env[65758]: DEBUG nova.network.neutron [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Successfully updated port: 91228388-8d10-417b-b79f-c45873d878e3 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 978.621149] env[65758]: DEBUG nova.compute.utils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 978.626415] env[65758]: DEBUG nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 978.626950] env[65758]: DEBUG nova.network.neutron [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 978.627249] env[65758]: WARNING neutronclient.v2_0.client [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
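The instance_info_cache update above carries the whole VIF description, including fixed and floating addresses, as nested JSON. A small helper, hypothetical and not part of Nova, that pulls those addresses out of one such entry:

# Hypothetical helper: summarize fixed/floating IPs from a network_info
# entry shaped like the cache update logged above.
def summarize_vif(vif):
    """Return (vif_id, fixed_ips, floating_ips) for one network_info entry."""
    fixed, floating = [], []
    for subnet in vif.get("network", {}).get("subnets", []):
        for ip in subnet.get("ips", []):
            fixed.append(ip["address"])
            for fip in ip.get("floating_ips", []):
                floating.append(fip["address"])
    return vif.get("id"), fixed, floating


example_vif = {
    "id": "1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea",
    "network": {"subnets": [{"ips": [{"address": "192.168.128.4",
                                      "floating_ips": [{"address": "10.180.180.186"}]}]}]},
}
print(summarize_vif(example_vif))
# ('1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea', ['192.168.128.4'], ['10.180.180.186'])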
[ 978.627497] env[65758]: WARNING neutronclient.v2_0.client [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 978.628144] env[65758]: WARNING openstack [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 978.630031] env[65758]: WARNING openstack [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 978.689903] env[65758]: WARNING neutronclient.v2_0.client [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 978.689903] env[65758]: WARNING neutronclient.v2_0.client [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 978.699550] env[65758]: DEBUG nova.policy [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20fab3a60af1484aae8eea4e926b259a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3efa562362e94a48851ef7efa8c35123', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 978.762493] env[65758]: DEBUG nova.network.neutron [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Port 1a0cdbf3-b230-4f89-999a-4886f142722c binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 978.774040] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660880, 'name': CopyVirtualDisk_Task} progress is 60%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.797697] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Releasing lock "refresh_cache-7f5911fb-785e-444c-9408-c6884e06c5d3" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.885620] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "refresh_cache-6981b99e-8e9f-459a-b356-9ed726c268ed" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.885784] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "refresh_cache-6981b99e-8e9f-459a-b356-9ed726c268ed" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.885976] env[65758]: DEBUG nova.network.neutron [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 979.042898] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54db421d-d861-47fe-9382-7575f3f1c24a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.054950] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a56a37-d38b-459f-becf-7e4a34d6af3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.090749] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3374ae54-e5c0-4ce3-a759-2bfe1dcc9e0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.101020] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c39284-d2f6-41f4-b77b-8dc299b8dbe6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.117235] env[65758]: DEBUG nova.compute.provider_tree [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.127800] env[65758]: DEBUG nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 979.242153] env[65758]: DEBUG nova.network.neutron [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Successfully created port: f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 979.275522] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660880, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.328530] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.328530] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.391562] env[65758]: WARNING openstack [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 979.392092] env[65758]: WARNING openstack [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 979.443560] env[65758]: DEBUG nova.network.neutron [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 979.565018] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquiring lock "37bae4b3-6959-4f44-8600-26a4f859103c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.565270] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "37bae4b3-6959-4f44-8600-26a4f859103c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.606820] env[65758]: DEBUG nova.compute.manager [req-17b6b67a-3e27-499a-a6c8-4cbaca584203 req-c960f66a-7aba-4ad9-85c7-076f6370a7f8 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Received event network-vif-plugged-91228388-8d10-417b-b79f-c45873d878e3 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 979.606963] env[65758]: DEBUG oslo_concurrency.lockutils [req-17b6b67a-3e27-499a-a6c8-4cbaca584203 req-c960f66a-7aba-4ad9-85c7-076f6370a7f8 service nova] Acquiring lock "6981b99e-8e9f-459a-b356-9ed726c268ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.607168] env[65758]: DEBUG oslo_concurrency.lockutils [req-17b6b67a-3e27-499a-a6c8-4cbaca584203 req-c960f66a-7aba-4ad9-85c7-076f6370a7f8 service nova] Lock "6981b99e-8e9f-459a-b356-9ed726c268ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.607272] env[65758]: DEBUG oslo_concurrency.lockutils [req-17b6b67a-3e27-499a-a6c8-4cbaca584203 req-c960f66a-7aba-4ad9-85c7-076f6370a7f8 service nova] Lock "6981b99e-8e9f-459a-b356-9ed726c268ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.607442] env[65758]: DEBUG nova.compute.manager [req-17b6b67a-3e27-499a-a6c8-4cbaca584203 req-c960f66a-7aba-4ad9-85c7-076f6370a7f8 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] No waiting events found dispatching network-vif-plugged-91228388-8d10-417b-b79f-c45873d878e3 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 979.607657] env[65758]: WARNING nova.compute.manager [req-17b6b67a-3e27-499a-a6c8-4cbaca584203 req-c960f66a-7aba-4ad9-85c7-076f6370a7f8 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Received unexpected event network-vif-plugged-91228388-8d10-417b-b79f-c45873d878e3 for instance with vm_state building and task_state spawning. 
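The "<uuid>-events" lock lines just above (acquired, held 0.000s, released) are the standard oslo.concurrency pattern Nova uses to serialize per-instance event handling. A minimal sketch of the same pattern, assuming only that oslo.concurrency is installed; the function body and lock name below are illustrative, not Nova's actual code:

    # Sketch: serialize per-instance event handling with oslo.concurrency,
    # mirroring the "<uuid>-events" acquire/release pairs in the log above.
    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid, event_name):
        # Same naming convention as the log: one lock per instance, suffixed
        # with "-events", so event registration and dispatch never interleave.
        with lockutils.lock(f"{instance_uuid}-events"):
            # ... look up and remove any waiter registered for event_name ...
            return None  # placeholder: "No waiting events found"

    pop_instance_event("6981b99e-8e9f-459a-b356-9ed726c268ed",
                       "network-vif-plugged-91228388-8d10-417b-b79f-c45873d878e3")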
[ 979.620523] env[65758]: DEBUG nova.scheduler.client.report [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.634121] env[65758]: INFO nova.virt.block_device [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Booting with volume bf4db715-09f3-4c4a-8572-d4bc048f0a0f at /dev/sda [ 979.686460] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb734841-e16b-4c0d-96d9-5d591f835fc0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.699426] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3b785d-79cf-4a04-afde-83b0f4036b0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.714699] env[65758]: WARNING neutronclient.v2_0.client [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
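The inventory record above is what the resource tracker hands to the Placement service; the usable capacity of each resource class is (total - reserved) * allocation_ratio. A quick arithmetic check against the numbers in that record (plain Python, values copied from the log):

    # Effective capacity for the inventory logged above:
    # capacity = (total - reserved) * allocation_ratio
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0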
[ 979.714699] env[65758]: WARNING openstack [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 979.714699] env[65758]: WARNING openstack [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 979.750526] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-072d954e-4a2d-4c21-b5e4-e0d8679e34e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.762687] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec9fbdf-6ecf-4fe1-9d04-4fbca2ebe42e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.790854] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660880, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.801771] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "3ff9192b-3956-49f6-afd2-827759826056-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.802065] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.802197] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.803553] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powering on the VM {{(pid=65758) power_on_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.804204] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efb26301-bc36-4d60-9ec4-cf5fc8f8fdaf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.821689] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8843d7a2-da41-49c4-9122-644060673cd8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.827279] env[65758]: DEBUG oslo_vmware.api [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 979.827279] env[65758]: value = "task-4660884" [ 979.827279] env[65758]: _type = "Task" [ 979.827279] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.833461] env[65758]: DEBUG nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 979.836880] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cdccd9-93ea-472f-bdb5-57c43cb4dc87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.847331] env[65758]: DEBUG oslo_vmware.api [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660884, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.853011] env[65758]: DEBUG nova.virt.block_device [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Updating existing volume attachment record: fb6f1e40-659d-488a-b051-671458301d03 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 979.962280] env[65758]: DEBUG nova.network.neutron [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Updating instance_info_cache with network_info: [{"id": "91228388-8d10-417b-b79f-c45873d878e3", "address": "fa:16:3e:a8:8c:cf", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91228388-8d", "ovs_interfaceid": "91228388-8d10-417b-b79f-c45873d878e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 980.068242] env[65758]: DEBUG nova.compute.manager [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 980.126532] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.132611] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.991s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.132837] env[65758]: DEBUG nova.objects.instance [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 980.169697] env[65758]: INFO nova.scheduler.client.report [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleted allocations for instance ba3153f2-8e6f-469c-8730-957c5eebe97b [ 980.267499] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660880, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.045817} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.267859] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fe1141ac-cd89-43cf-a723-116931d6815e/fe1141ac-cd89-43cf-a723-116931d6815e.vmdk to [datastore2] a6ed7451-7b59-4ed9-8fb7-871d6107a272/a6ed7451-7b59-4ed9-8fb7-871d6107a272.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 980.268813] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14d39e6-018f-40fc-a1f8-03a329a12ac4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.296967] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] a6ed7451-7b59-4ed9-8fb7-871d6107a272/a6ed7451-7b59-4ed9-8fb7-871d6107a272.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.297500] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cca5698f-06ca-4075-baf0-f6249036a48b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.325477] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 980.325477] env[65758]: value = "task-4660885" [ 980.325477] env[65758]: _type = "Task" [ 980.325477] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.344619] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660885, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.351802] env[65758]: DEBUG oslo_vmware.api [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660884, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.365558] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.468533] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "refresh_cache-6981b99e-8e9f-459a-b356-9ed726c268ed" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.469238] env[65758]: DEBUG nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Instance network_info: |[{"id": "91228388-8d10-417b-b79f-c45873d878e3", "address": "fa:16:3e:a8:8c:cf", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91228388-8d", "ovs_interfaceid": "91228388-8d10-417b-b79f-c45873d878e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 980.469894] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:8c:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f87a752-ebb0-49a4-a67b-e356fa45b89b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91228388-8d10-417b-b79f-c45873d878e3', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.478555] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 980.479329] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.479566] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e80212a-31f3-4451-b6c6-157019c8408f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.503741] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.503741] env[65758]: value = "task-4660886" [ 980.503741] env[65758]: _type = "Task" [ 980.503741] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.513499] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660886, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.593746] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.678325] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e0e2f5c-b557-472d-a3e8-c5a3a4c41301 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.837s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.682415] env[65758]: DEBUG oslo_concurrency.lockutils [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] Acquired lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.682415] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e13fb2e-69cd-47bc-850b-025c42e8214e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.691084] env[65758]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
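The suds warning above, together with the fault list and traceback in the records that follow, shows the usual translation chain: vCenter returns a ManagedObjectNotFound SOAP fault, oslo.vmware raises ManagedObjectNotFoundException, and Nova reports the instance as not found. A rough sketch of handling that case at a call site, assuming oslo.vmware is installed; the helper name and the property being fetched are placeholders, not Nova's code:

    # Sketch: treat a vanished vSphere managed object as "instance gone",
    # as the records below do for vm-909953.
    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util

    def get_vm_power_state(session, vm_ref):
        try:
            # invoke_api / get_object_property are the oslo.vmware entry points
            # visible in the traceback below; the property name is illustrative.
            return session.invoke_api(vim_util, 'get_object_property',
                                      session.vim, vm_ref, 'runtime.powerState')
        except vexc.ManagedObjectNotFoundException:
            # The VM was deleted (or never finished creating) on the vCenter
            # side; callers map this to an instance-not-found condition.
            return None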
[ 980.691995] env[65758]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=65758) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 980.692347] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd622e2c-5c85-4aac-807d-3be2738e77bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.703780] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6854c98b-9716-4dd9-b09f-0c5c00a96ef6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.754935] env[65758]: ERROR root [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-909953' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-909953' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-909953' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-909953'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-909953' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-909953' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-909953'}\n"]: nova.exception.InstanceNotFound: Instance ba3153f2-8e6f-469c-8730-957c5eebe97b could not be found. [ 980.755165] env[65758]: DEBUG oslo_concurrency.lockutils [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] Releasing lock "ba3153f2-8e6f-469c-8730-957c5eebe97b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.755296] env[65758]: DEBUG nova.compute.manager [req-ea92feb0-bb4c-481f-be1f-a1f5488ef5f7 req-ee40c6d5-c19d-4364-86a2-2a50c48c9d09 service nova] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Detach interface failed, port_id=ffa44f11-41da-49d5-af63-8c9328cd2c67, reason: Instance ba3153f2-8e6f-469c-8730-957c5eebe97b could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 980.818921] env[65758]: WARNING neutronclient.v2_0.client [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 980.840867] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660885, 'name': ReconfigVM_Task, 'duration_secs': 0.329652} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.844276] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Reconfigured VM instance instance-00000054 to attach disk [datastore2] a6ed7451-7b59-4ed9-8fb7-871d6107a272/a6ed7451-7b59-4ed9-8fb7-871d6107a272.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.845107] env[65758]: DEBUG oslo_vmware.api [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660884, 'name': PowerOnVM_Task, 'duration_secs': 0.919114} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.845382] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59f82bb9-c864-4848-b04e-f583f7f97c25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.847170] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 980.847442] env[65758]: DEBUG nova.compute.manager [None req-f5132884-44a4-41cc-a6df-203bf0c541f5 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 980.848233] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e53be0-c5fd-4574-84d5-99b0a53bad60 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.860370] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 980.860370] env[65758]: value = "task-4660887" [ 980.860370] env[65758]: _type = "Task" [ 980.860370] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.996547] env[65758]: DEBUG nova.network.neutron [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Successfully updated port: f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 981.018030] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660886, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.142625] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dfb4e54b-5a8c-492f-8b34-83597be6104c tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.147120] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.129s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.147422] env[65758]: DEBUG nova.objects.instance [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lazy-loading 'resources' on Instance uuid 33098961-060f-4503-a805-6ae7351b45ea {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.382103] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660887, 'name': Rename_Task, 'duration_secs': 0.234521} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.384614] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.385522] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8935728f-b7dd-40a3-9409-f5fa1622f48c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.400993] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 981.400993] env[65758]: value = "task-4660888" [ 981.400993] env[65758]: _type = "Task" [ 981.400993] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.415511] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660888, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.418234] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.418411] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.418581] env[65758]: DEBUG nova.network.neutron [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 981.423323] env[65758]: DEBUG nova.compute.manager [req-7551c83c-9aa7-4dc6-9542-b2eeb3b7a5b2 req-db985132-194d-4611-ab32-8509db1f1480 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Received event network-vif-plugged-f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 981.424129] env[65758]: DEBUG oslo_concurrency.lockutils [req-7551c83c-9aa7-4dc6-9542-b2eeb3b7a5b2 req-db985132-194d-4611-ab32-8509db1f1480 service nova] Acquiring lock "0ce11868-fee2-40d3-9433-7bc398a1f756-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.424129] env[65758]: DEBUG oslo_concurrency.lockutils [req-7551c83c-9aa7-4dc6-9542-b2eeb3b7a5b2 req-db985132-194d-4611-ab32-8509db1f1480 service nova] Lock "0ce11868-fee2-40d3-9433-7bc398a1f756-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.424129] env[65758]: DEBUG oslo_concurrency.lockutils [req-7551c83c-9aa7-4dc6-9542-b2eeb3b7a5b2 req-db985132-194d-4611-ab32-8509db1f1480 service nova] Lock "0ce11868-fee2-40d3-9433-7bc398a1f756-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.424129] env[65758]: DEBUG nova.compute.manager [req-7551c83c-9aa7-4dc6-9542-b2eeb3b7a5b2 req-db985132-194d-4611-ab32-8509db1f1480 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] No waiting events found dispatching network-vif-plugged-f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 981.424266] env[65758]: WARNING nova.compute.manager [req-7551c83c-9aa7-4dc6-9542-b2eeb3b7a5b2 req-db985132-194d-4611-ab32-8509db1f1480 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Received unexpected event network-vif-plugged-f7953062-77ac-411d-9809-b817fca06bbb for instance with vm_state building and task_state block_device_mapping. 
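The network_info blobs logged a little earlier for instance 6981b99e-8e9f-459a-b356-9ed726c268ed are plain list-of-dict structures, so extracting what the driver needs (MAC address, fixed IPs, device name) takes only a few lines. A stand-alone example using the values from that record:

    # Walk a network_info structure shaped like the one logged above and print
    # the MAC address, fixed IPs and tap device name for each VIF.
    network_info = [{
        "id": "91228388-8d10-417b-b79f-c45873d878e3",
        "address": "fa:16:3e:a8:8c:cf",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.14"}]}]},
        "devname": "tap91228388-8d",
        "ovs_interfaceid": "91228388-8d10-417b-b79f-c45873d878e3",
    }]
    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["address"], ips, vif["devname"])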
[ 981.500885] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Acquiring lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.501019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Acquired lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.501214] env[65758]: DEBUG nova.network.neutron [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 981.521139] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660886, 'name': CreateVM_Task, 'duration_secs': 0.580565} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.521347] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 981.521932] env[65758]: WARNING neutronclient.v2_0.client [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
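Instance 0ce11868-fee2-40d3-9433-7bc398a1f756 is the boot-from-volume case in this run: earlier records show "Booting with volume bf4db715-09f3-4c4a-8572-d4bc048f0a0f at /dev/sda" and an existing volume attachment record being updated instead of an image copy. A request that produces this kind of build carries a block_device_mapping_v2 entry roughly like the sketch below; the volume id and device name are taken from the log, the remaining fields are the standard v2 shape and are shown only for illustration:

    # Sketch: block_device_mapping_v2 entry for booting from an existing
    # Cinder volume, as in the 0ce11868-... build above.
    block_device_mapping_v2 = [{
        "uuid": "bf4db715-09f3-4c4a-8572-d4bc048f0a0f",  # volume id from the log
        "source_type": "volume",
        "destination_type": "volume",
        "boot_index": 0,
        "device_name": "/dev/sda",
        "delete_on_termination": False,
    }]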
[ 981.522341] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.522486] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.522833] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 981.523117] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4655cfe9-f6f8-44f0-a7dc-24c913f4ad2d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.529983] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 981.529983] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e483a8-eee9-7554-e8a1-7263f6fc8ace" [ 981.529983] env[65758]: _type = "Task" [ 981.529983] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.541011] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e483a8-eee9-7554-e8a1-7263f6fc8ace, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.912788] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660888, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.923290] env[65758]: WARNING neutronclient.v2_0.client [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
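The datastore strings in the surrounding records, for example "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f", follow the vSphere convention "[<datastore>] <folder>/<file>". Composing the cached-image VMDK path from the image id seen above is a one-liner (an illustrative helper, not Nova's ds_util):

    # Sketch: compose the datastore path of a cached image VMDK, matching the
    # "[datastore1] devstack-image-cache_base/..." paths in the log.
    def image_cache_vmdk_path(datastore, image_id,
                              cache_folder="devstack-image-cache_base"):
        return f"[{datastore}] {cache_folder}/{image_id}/{image_id}.vmdk"

    print(image_cache_vmdk_path("datastore1",
                                "75a6399b-5100-4c51-b5cf-162bd505a28f"))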
[ 981.924103] env[65758]: WARNING openstack [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 981.924503] env[65758]: WARNING openstack [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 981.969997] env[65758]: DEBUG nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 981.970265] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 981.971278] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 981.971278] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 981.971278] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 981.971278] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 981.971278] env[65758]: DEBUG nova.virt.hardware [None 
req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 981.971278] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 981.971501] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 981.971592] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 981.974231] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 981.974495] env[65758]: DEBUG nova.virt.hardware [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 981.975613] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7e429a-4f4b-431b-af07-74d06ad39a5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.988381] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b59299-41e8-4557-8854-afd5a2c315a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.014917] env[65758]: WARNING openstack [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 982.015518] env[65758]: WARNING openstack [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: 
no such option valid_interfaces in group [barbican] [ 982.044256] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e483a8-eee9-7554-e8a1-7263f6fc8ace, 'name': SearchDatastore_Task, 'duration_secs': 0.015861} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.047074] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.047485] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.047554] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.047672] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.047858] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.048391] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b45d35e-d72e-4967-99c4-fcf7fe29c5d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.059458] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.059695] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 982.060490] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f375f929-8915-4ba5-8199-5e2aebdfbc52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.068193] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 982.068193] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5210f29a-1e85-b181-9d48-5d9539c074df" [ 982.068193] env[65758]: _type = "Task" [ 982.068193] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.080661] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5210f29a-1e85-b181-9d48-5d9539c074df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.090967] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dae1340-8e22-4d8b-a60f-d8980d1da1c7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.102546] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d43b6c-676b-4d05-ad9f-7159b4bfffca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.135138] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bfd0a3-539b-4f44-a204-026c3c3cf974 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.145092] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e465b975-2745-4a77-ae90-06912451f8c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.159385] env[65758]: DEBUG nova.compute.provider_tree [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.164983] env[65758]: DEBUG nova.network.neutron [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 982.411889] env[65758]: DEBUG oslo_vmware.api [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660888, 'name': PowerOnVM_Task, 'duration_secs': 0.60646} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.412180] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.412400] env[65758]: INFO nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Took 17.03 seconds to spawn the instance on the hypervisor. [ 982.412541] env[65758]: DEBUG nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 982.413350] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfbf240-d346-48c2-98b0-8b9723f43b53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.511928] env[65758]: DEBUG nova.compute.manager [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Received event network-changed-91228388-8d10-417b-b79f-c45873d878e3 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 982.511928] env[65758]: DEBUG nova.compute.manager [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Refreshing instance network info cache due to event network-changed-91228388-8d10-417b-b79f-c45873d878e3. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 982.512089] env[65758]: DEBUG oslo_concurrency.lockutils [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] Acquiring lock "refresh_cache-6981b99e-8e9f-459a-b356-9ed726c268ed" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.512247] env[65758]: DEBUG oslo_concurrency.lockutils [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] Acquired lock "refresh_cache-6981b99e-8e9f-459a-b356-9ed726c268ed" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.512545] env[65758]: DEBUG nova.network.neutron [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Refreshing network info cache for port 91228388-8d10-417b-b79f-c45873d878e3 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 982.520052] env[65758]: WARNING neutronclient.v2_0.client [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
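The repeated "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around the refresh_cache-<uuid> names are oslo.concurrency named locks serializing network-info cache refreshes per instance. A minimal sketch of that locking pattern, using illustrative names rather than Nova's actual helpers:

from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid, fetch_nw_info):
    # Serialize refreshes per instance, mirroring the
    # "refresh_cache-<uuid>" lock names seen in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return fetch_nw_info(instance_uuid)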
[ 982.520052] env[65758]: WARNING openstack [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 982.520052] env[65758]: WARNING openstack [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 982.530377] env[65758]: WARNING neutronclient.v2_0.client [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 982.531212] env[65758]: WARNING openstack [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 982.531719] env[65758]: WARNING openstack [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 982.540669] env[65758]: WARNING neutronclient.v2_0.client [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
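The "Disabling service" warnings above are openstacksdk reacting to oslo_config.cfg.NoSuchOptError: it looks up valid_interfaces in the [cinder] and [barbican] groups, but no keystoneauth adapter options were ever registered there. A minimal reproduction of that error with oslo.config alone, assuming nothing about Nova's own config code:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))
conf(args=[])  # parse an empty command line; no config files required

try:
    _ = conf.cinder.valid_interfaces       # option was never registered
except cfg.NoSuchOptError as exc:
    print(exc)  # -> no such option valid_interfaces in group [cinder]

# Registering the option (keystoneauth adapter loading normally does this)
# makes the same lookup return a value instead of disabling the service.
conf.register_opts(
    [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
    group='cinder')
print(conf.cinder.valid_interfaces)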
[ 982.541422] env[65758]: WARNING openstack [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 982.541905] env[65758]: WARNING openstack [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 982.583516] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5210f29a-1e85-b181-9d48-5d9539c074df, 'name': SearchDatastore_Task, 'duration_secs': 0.024156} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.585134] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e970d400-024f-414c-9358-1f70a62341f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.593198] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 982.593198] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525588a3-019d-71d6-6d3f-feee08212a3b" [ 982.593198] env[65758]: _type = "Task" [ 982.593198] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.603040] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525588a3-019d-71d6-6d3f-feee08212a3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.675027] env[65758]: DEBUG nova.scheduler.client.report [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 982.930105] env[65758]: DEBUG nova.network.neutron [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance_info_cache with network_info: [{"id": "1a0cdbf3-b230-4f89-999a-4886f142722c", "address": "fa:16:3e:a2:d0:8b", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0cdbf3-b2", "ovs_interfaceid": "1a0cdbf3-b230-4f89-999a-4886f142722c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 982.942353] env[65758]: INFO nova.compute.manager [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Took 30.93 seconds to build instance. 
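For reference, the inventory payload reported above maps to effective Placement capacity as (total - reserved) * allocation_ratio per resource class. A quick check against the numbers in the log (standard Placement arithmetic, not code taken from Nova):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0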
[ 982.974911] env[65758]: DEBUG nova.network.neutron [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Updating instance_info_cache with network_info: [{"id": "f7953062-77ac-411d-9809-b817fca06bbb", "address": "fa:16:3e:59:27:00", "network": {"id": "7d47c1f7-d953-4e8d-ae38-58ba035ee5ba", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-335033115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3efa562362e94a48851ef7efa8c35123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7953062-77", "ovs_interfaceid": "f7953062-77ac-411d-9809-b817fca06bbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 983.112102] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525588a3-019d-71d6-6d3f-feee08212a3b, 'name': SearchDatastore_Task, 'duration_secs': 0.010516} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.112385] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.113044] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 6981b99e-8e9f-459a-b356-9ed726c268ed/6981b99e-8e9f-459a-b356-9ed726c268ed.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 983.113044] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-364951e1-006e-463b-9225-774a2954cafa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.122145] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 983.122145] env[65758]: value = "task-4660890" [ 983.122145] env[65758]: _type = "Task" [ 983.122145] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.131724] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660890, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.186148] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.188723] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.215s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.188966] env[65758]: DEBUG nova.objects.instance [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lazy-loading 'resources' on Instance uuid d5d27a5c-afe4-49a1-a385-0a8f625b5a1e {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.230304] env[65758]: INFO nova.scheduler.client.report [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Deleted allocations for instance 33098961-060f-4503-a805-6ae7351b45ea [ 983.440903] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.450020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86000095-37e7-4bb4-8cf6-9ad769542f49 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.444s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.479362] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Releasing lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.479858] env[65758]: DEBUG nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Instance network_info: |[{"id": "f7953062-77ac-411d-9809-b817fca06bbb", "address": "fa:16:3e:59:27:00", "network": {"id": "7d47c1f7-d953-4e8d-ae38-58ba035ee5ba", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-335033115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3efa562362e94a48851ef7efa8c35123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7953062-77", "ovs_interfaceid": "f7953062-77ac-411d-9809-b817fca06bbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 983.480340] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:27:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13d625c9-77ec-4edb-a56b-9f37a314cc39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7953062-77ac-411d-9809-b817fca06bbb', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 983.491173] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Creating folder: Project (3efa562362e94a48851ef7efa8c35123). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 983.491403] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-113ee72c-40e8-4f2b-8c5c-8afbfbb2eef6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.510589] env[65758]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 983.510785] env[65758]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=65758) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 983.511327] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Folder already exists: Project (3efa562362e94a48851ef7efa8c35123). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 983.511486] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Creating folder: Instances. Parent ref: group-v909967. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 983.511787] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39ec10bc-796a-427d-bc76-09787f11121d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.527440] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Created folder: Instances in parent group-v909967. [ 983.527856] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 983.528017] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 983.528249] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8dda1bca-4bba-45e9-aaf2-c8f82998c0e1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.553162] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 983.553162] env[65758]: value = "task-4660893" [ 983.553162] env[65758]: _type = "Task" [ 983.553162] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.565096] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660893, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.636826] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660890, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.741309] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5afcdbfc-a1e3-4265-8e76-fde531534fa9 tempest-ImagesOneServerNegativeTestJSON-1131823398 tempest-ImagesOneServerNegativeTestJSON-1131823398-project-member] Lock "33098961-060f-4503-a805-6ae7351b45ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.039s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.963750] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebab4e53-3a95-4d5c-88ac-fc374fbbf71a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.988611] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28088f83-13b4-4c14-9b72-613a62708877 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.998119] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance '3ff9192b-3956-49f6-afd2-827759826056' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 984.067312] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660893, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.097951] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac77cc71-c5b2-4f32-8c5a-f98b7d93df3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.107192] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d15085-308a-4dcb-9c90-cfdd650d4d8d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.143300] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac61905f-6769-483f-84e8-43437f763c96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.155012] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113a30c8-bd09-47a7-bc1b-56f15ecbb0a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.158889] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744215} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.160262] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 6981b99e-8e9f-459a-b356-9ed726c268ed/6981b99e-8e9f-459a-b356-9ed726c268ed.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 984.160476] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 984.163730] env[65758]: DEBUG nova.compute.manager [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Received event network-changed-f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 984.163911] env[65758]: DEBUG nova.compute.manager [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Refreshing instance network info cache due to event network-changed-f7953062-77ac-411d-9809-b817fca06bbb. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 984.164170] env[65758]: DEBUG oslo_concurrency.lockutils [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] Acquiring lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.164386] env[65758]: DEBUG oslo_concurrency.lockutils [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] Acquired lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.164556] env[65758]: DEBUG nova.network.neutron [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Refreshing network info cache for port f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 984.166185] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2f42b70-291a-4c77-9bc6-c5443fd2e170 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.177469] env[65758]: DEBUG nova.compute.provider_tree [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.182192] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 
tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 984.182192] env[65758]: value = "task-4660894" [ 984.182192] env[65758]: _type = "Task" [ 984.182192] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.192847] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.511342] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.512085] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-274060f0-7b22-4922-b82f-c681ec9d9420 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.524947] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 984.524947] env[65758]: value = "task-4660895" [ 984.524947] env[65758]: _type = "Task" [ 984.524947] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.539718] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660895, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.571666] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660893, 'name': CreateVM_Task, 'duration_secs': 0.57114} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.571666] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 984.571666] env[65758]: WARNING neutronclient.v2_0.client [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
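The "Instance VIF info" entry earlier in this stretch is derived from the neutron network_info shown in the cache updates: the port id becomes iface_id, the port MAC becomes mac_address, and the NSX logical-switch id becomes an OpaqueNetwork reference. A rough illustration of that mapping over one network_info entry, with field names taken from the log; this is not Nova's actual helper:

def vif_info_from_network_info(vif):
    # vif is one entry of the network_info list logged above.
    return {
        'network_name': vif['network']['bridge'],                  # 'br-int'
        'mac_address': vif['address'],                             # e.g. 'fa:16:3e:59:27:00'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],                                     # neutron port UUID
        'vif_model': 'vmxnet3',
    }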
[ 984.571666] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909979', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'name': 'volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ce11868-fee2-40d3-9433-7bc398a1f756', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'serial': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f'}, 'attachment_id': 'fb6f1e40-659d-488a-b051-671458301d03', 'disk_bus': None, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=65758) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 984.571666] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Root volume attach. Driver type: vmdk {{(pid=65758) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 984.571666] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad145fe-b0a4-4939-b2d7-710c653c62df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.581374] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe22ab7-4b19-4302-9285-60f298417d81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.585180] env[65758]: WARNING neutronclient.v2_0.client [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
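The block device information dict logged just above carries the boot-from-volume wiring for the instance: the mapping with boot_index 0 on /dev/sda holds the vmdk connection_info used for the root-volume attach. A small sketch of pulling that entry out, plain dict handling over the structure shown in the log rather than Nova's own code:

def find_root_volume(block_device_info):
    # block_device_info is shaped like the dict logged above
    # ('root_device_name', 'block_device_mapping', ...).
    for bdm in block_device_info.get('block_device_mapping', []):
        if bdm.get('boot_index') == 0:
            data = bdm['connection_info']['data']
            return {
                'mount_device': bdm['mount_device'],   # '/dev/sda'
                'volume_id': data['volume_id'],
                'vmdk_backing': data['volume'],        # e.g. 'vm-909979'
            }
    return None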
[ 984.585830] env[65758]: WARNING openstack [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 984.586199] env[65758]: WARNING openstack [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 984.599937] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e673a32d-dfeb-41e8-aed2-c19aed6899df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.611824] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-2cd2716e-e961-40d0-b8c7-12149ed36760 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.623508] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 984.623508] env[65758]: value = "task-4660896" [ 984.623508] env[65758]: _type = "Task" [ 984.623508] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.636779] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.670124] env[65758]: WARNING neutronclient.v2_0.client [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
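Throughout this log the driver blocks on vCenter tasks (CreateVM_Task, RelocateVM_Task, CopyVirtualDisk_Task, ...) by polling progress until completion, as the wait_for_task / _poll_task lines show. The polling pattern, sketched with oslo.service's FixedIntervalLoopingCall and a hypothetical get_task_state() helper; an illustration of the pattern only, not oslo.vmware's implementation:

from oslo_service import loopingcall

def wait_for_task(get_task_state, poll_interval=0.5):
    # get_task_state() is assumed to return ('running', progress) or
    # ('success', result); it stands in for a real vCenter task query.
    def _poll():
        state, payload = get_task_state()
        if state == 'success':
            raise loopingcall.LoopingCallDone(payload)
        print('progress is %s%%' % payload)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()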
[ 984.671012] env[65758]: WARNING openstack [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 984.671622] env[65758]: WARNING openstack [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 984.682851] env[65758]: DEBUG nova.scheduler.client.report [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.698944] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07405} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.699307] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 984.700181] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f94bdc8-edf1-416a-b493-e1c670f46d28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.727881] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 6981b99e-8e9f-459a-b356-9ed726c268ed/6981b99e-8e9f-459a-b356-9ed726c268ed.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.728648] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-893ffb62-8726-4a01-8dc7-6b0a8298a784 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.750797] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 984.750797] env[65758]: value = "task-4660897" [ 984.750797] env[65758]: _type = "Task" [ 984.750797] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.763743] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660897, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.886595] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 984.886887] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909994', 'volume_id': '0659d25b-87ba-47b1-be68-7ea7275165bb', 'name': 'volume-0659d25b-87ba-47b1-be68-7ea7275165bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a', 'attached_at': '', 'detached_at': '', 'volume_id': '0659d25b-87ba-47b1-be68-7ea7275165bb', 'serial': '0659d25b-87ba-47b1-be68-7ea7275165bb'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 984.888845] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b913211b-a497-45b4-b834-367f1e73a02c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.908735] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00909fcd-f8d3-4001-90d4-cc99c7b1e7ec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.937590] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] volume-0659d25b-87ba-47b1-be68-7ea7275165bb/volume-0659d25b-87ba-47b1-be68-7ea7275165bb.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.938944] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06177f73-5f0b-4a4d-8fc8-71acc651cff5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.959522] env[65758]: DEBUG oslo_vmware.api [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 984.959522] env[65758]: value = "task-4660898" [ 984.959522] env[65758]: _type = "Task" [ 984.959522] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.972223] env[65758]: DEBUG oslo_vmware.api [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660898, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.044220] env[65758]: DEBUG oslo_vmware.api [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660895, 'name': PowerOnVM_Task, 'duration_secs': 0.511521} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.044839] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.045311] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5adaad7c-da1c-4879-8950-5a5073ed030c tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance '3ff9192b-3956-49f6-afd2-827759826056' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 985.144016] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 40%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.194915] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.004s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.198701] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.407s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.198998] env[65758]: DEBUG nova.objects.instance [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lazy-loading 'resources' on Instance uuid fcb795c2-dd13-458a-a71e-1c9e4fdc5e06 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.240043] env[65758]: INFO nova.scheduler.client.report [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Deleted allocations for instance d5d27a5c-afe4-49a1-a385-0a8f625b5a1e [ 985.263171] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660897, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.472430] env[65758]: DEBUG oslo_vmware.api [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660898, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.647139] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.772421] env[65758]: DEBUG oslo_concurrency.lockutils [None req-648c5e6f-08cd-4aa4-ade0-db788916d8c8 tempest-ServersListShow298Test-1685172082 tempest-ServersListShow298Test-1685172082-project-member] Lock "d5d27a5c-afe4-49a1-a385-0a8f625b5a1e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.215s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.785833] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660897, 'name': ReconfigVM_Task, 'duration_secs': 0.672506} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.787883] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 6981b99e-8e9f-459a-b356-9ed726c268ed/6981b99e-8e9f-459a-b356-9ed726c268ed.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.787883] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c4fe914-9917-46e6-9c31-2d4d65094de9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.797279] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 985.797279] env[65758]: value = "task-4660899" [ 985.797279] env[65758]: _type = "Task" [ 985.797279] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.812555] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660899, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.974276] env[65758]: DEBUG oslo_vmware.api [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660898, 'name': ReconfigVM_Task, 'duration_secs': 0.748091} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.974677] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Reconfigured VM instance instance-00000053 to attach disk [datastore1] volume-0659d25b-87ba-47b1-be68-7ea7275165bb/volume-0659d25b-87ba-47b1-be68-7ea7275165bb.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.987170] env[65758]: DEBUG nova.network.neutron [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Updated VIF entry in instance network info cache for port 91228388-8d10-417b-b79f-c45873d878e3. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 985.987170] env[65758]: DEBUG nova.network.neutron [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Updating instance_info_cache with network_info: [{"id": "91228388-8d10-417b-b79f-c45873d878e3", "address": "fa:16:3e:a8:8c:cf", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91228388-8d", "ovs_interfaceid": "91228388-8d10-417b-b79f-c45873d878e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 985.988769] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-775e434d-85c9-41a7-8d32-4167c1b24c07 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.003636] env[65758]: DEBUG oslo_concurrency.lockutils [req-746cd10e-14be-4daf-9071-c40353e86ae5 req-23065f98-1e87-4e1b-8435-c56b9ae25102 service nova] Releasing lock "refresh_cache-6981b99e-8e9f-459a-b356-9ed726c268ed" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.014886] env[65758]: DEBUG oslo_vmware.api [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 986.014886] env[65758]: value = "task-4660900" [ 986.014886] env[65758]: _type = "Task" [ 986.014886] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.031606] env[65758]: DEBUG oslo_vmware.api [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660900, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.143105] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 63%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.179573] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7abdea6-5656-441d-80c4-c463fde25012 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.190022] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3033f6f-39e4-401a-aa3e-4626ae447386 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.230382] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8d95ae-425e-4732-92d3-ffb0f1145376 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.240701] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36631deb-3990-4e32-b336-d33b53df3687 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.259497] env[65758]: DEBUG nova.compute.provider_tree [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.308499] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660899, 'name': Rename_Task, 'duration_secs': 0.283497} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.308729] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 986.309036] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc29a4d8-f9d3-4247-bb2a-90fc792b400b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.317272] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 986.317272] env[65758]: value = "task-4660901" [ 986.317272] env[65758]: _type = "Task" [ 986.317272] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.329054] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.491264] env[65758]: WARNING neutronclient.v2_0.client [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 986.492098] env[65758]: WARNING openstack [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 986.492370] env[65758]: WARNING openstack [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 986.530620] env[65758]: DEBUG oslo_vmware.api [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660900, 'name': ReconfigVM_Task, 'duration_secs': 0.207145} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.531381] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909994', 'volume_id': '0659d25b-87ba-47b1-be68-7ea7275165bb', 'name': 'volume-0659d25b-87ba-47b1-be68-7ea7275165bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a', 'attached_at': '', 'detached_at': '', 'volume_id': '0659d25b-87ba-47b1-be68-7ea7275165bb', 'serial': '0659d25b-87ba-47b1-be68-7ea7275165bb'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 986.650456] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 76%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.666972] env[65758]: DEBUG nova.network.neutron [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Updated VIF entry in instance network info cache for port f7953062-77ac-411d-9809-b817fca06bbb. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 986.667409] env[65758]: DEBUG nova.network.neutron [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Updating instance_info_cache with network_info: [{"id": "f7953062-77ac-411d-9809-b817fca06bbb", "address": "fa:16:3e:59:27:00", "network": {"id": "7d47c1f7-d953-4e8d-ae38-58ba035ee5ba", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-335033115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3efa562362e94a48851ef7efa8c35123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7953062-77", "ovs_interfaceid": "f7953062-77ac-411d-9809-b817fca06bbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 986.763369] env[65758]: DEBUG nova.scheduler.client.report [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.837759] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660901, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.103023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.103023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.103023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.103023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.103023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.105029] env[65758]: INFO nova.compute.manager [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Terminating instance [ 987.146394] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} 
progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.173270] env[65758]: DEBUG oslo_concurrency.lockutils [req-a91d9624-be19-4c03-8847-2f82a0bce878 req-ea4d5ca8-f39a-40d9-b2a4-5296c6a0fe98 service nova] Releasing lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.271484] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.073s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.275481] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.858s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.275870] env[65758]: DEBUG nova.objects.instance [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 987.321078] env[65758]: INFO nova.scheduler.client.report [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Deleted allocations for instance fcb795c2-dd13-458a-a71e-1c9e4fdc5e06 [ 987.345414] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660901, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.596921] env[65758]: DEBUG nova.objects.instance [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'flavor' on Instance uuid e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.612716] env[65758]: DEBUG nova.compute.manager [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 987.612716] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 987.615494] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4d9de7-7069-4a3e-a776-7908dda4ff34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.631022] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 987.631022] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68f1443e-68e3-4447-a612-07d391e4f42f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.640527] env[65758]: DEBUG oslo_vmware.api [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 987.640527] env[65758]: value = "task-4660902" [ 987.640527] env[65758]: _type = "Task" [ 987.640527] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.644891] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 97%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.654059] env[65758]: DEBUG oslo_vmware.api [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.841865] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660901, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.842770] env[65758]: DEBUG oslo_concurrency.lockutils [None req-170b659e-3315-4943-941a-2f1ced883ad2 tempest-ServersAaction247Test-490719661 tempest-ServersAaction247Test-490719661-project-member] Lock "fcb795c2-dd13-458a-a71e-1c9e4fdc5e06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.781s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.107834] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba86567b-7bdb-4f3f-b572-33678b65ac31 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.376s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.145529] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 98%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.156827] env[65758]: DEBUG oslo_vmware.api [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660902, 'name': PowerOffVM_Task, 'duration_secs': 0.271626} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.157442] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.157442] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.157688] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8ae0cd6-5c3c-4524-ab99-06ccc5f902bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.199055] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.199378] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.239371] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.239765] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.240195] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleting the datastore file [datastore2] a6ed7451-7b59-4ed9-8fb7-871d6107a272 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.240928] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ea2900d-4e07-4821-b366-623ddb326d91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.254311] env[65758]: DEBUG oslo_vmware.api [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 988.254311] env[65758]: value = "task-4660904" [ 988.254311] env[65758]: _type = "Task" [ 988.254311] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.270739] env[65758]: DEBUG oslo_vmware.api [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660904, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.287748] env[65758]: DEBUG oslo_concurrency.lockutils [None req-daf02f85-221c-43c5-b8e0-6af06e87cc03 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.288997] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 10.803s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.289218] env[65758]: DEBUG nova.objects.instance [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 988.337092] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660901, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.364925] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquiring lock "95509bbe-5aaf-471f-97b3-8a3085797568" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.365196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "95509bbe-5aaf-471f-97b3-8a3085797568" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.590258] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "3ff9192b-3956-49f6-afd2-827759826056" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.590505] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.590735] env[65758]: DEBUG nova.compute.manager [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Going to confirm migration 4 {{(pid=65758) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 988.644755] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 98%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.701808] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.701808] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.705197] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.705197] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.705197] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.709019] env[65758]: DEBUG nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 988.709019] env[65758]: INFO nova.compute.manager [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Terminating instance [ 988.766967] env[65758]: DEBUG oslo_vmware.api [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.366611} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.767297] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 988.768080] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 988.768080] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 988.768080] env[65758]: INFO nova.compute.manager [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Took 1.16 seconds to destroy the instance on the hypervisor. [ 988.768080] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 988.768244] env[65758]: DEBUG nova.compute.manager [-] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 988.768344] env[65758]: DEBUG nova.network.neutron [-] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 988.768548] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 988.769106] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 988.769410] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 988.836428] env[65758]: DEBUG oslo_vmware.api [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660901, 'name': PowerOnVM_Task, 'duration_secs': 2.169326} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.836764] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 988.837014] env[65758]: INFO nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Took 11.17 seconds to spawn the instance on the hypervisor. [ 988.837129] env[65758]: DEBUG nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 988.839328] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea90781-8a6e-4b20-8b36-2d877bbc8b0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.875881] env[65758]: DEBUG nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 988.893259] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 989.097500] env[65758]: WARNING neutronclient.v2_0.client [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 989.149639] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.217200] env[65758]: DEBUG nova.compute.manager [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 989.217541] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.220052] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afd9a7c8-f0ad-43e9-b08d-eadc8fa19ef7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.229382] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 989.229382] env[65758]: value = "task-4660905" [ 989.229382] env[65758]: _type = "Task" [ 989.229382] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.240299] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660905, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.243163] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.253168] env[65758]: WARNING neutronclient.v2_0.client [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 989.253584] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.253777] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.253969] env[65758]: DEBUG nova.network.neutron [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 989.254231] env[65758]: DEBUG nova.objects.instance [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lazy-loading 'info_cache' on Instance uuid 3ff9192b-3956-49f6-afd2-827759826056 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.301300] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c978015d-0d6b-45cc-b2f7-aa7f91705bf0 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.302924] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.937s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.305848] env[65758]: INFO nova.compute.claims [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.364647] env[65758]: INFO nova.compute.manager [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Took 23.99 seconds to build instance. 
[ 989.392690] env[65758]: DEBUG nova.compute.manager [req-a9cf62ce-581b-4cd0-b4ab-390c85c5fb20 req-76acfcc0-6fc8-45a7-990e-1843a5e60c4e service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Received event network-vif-deleted-8fc2eeef-dace-49f2-99aa-448810fbaa32 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 989.392690] env[65758]: INFO nova.compute.manager [req-a9cf62ce-581b-4cd0-b4ab-390c85c5fb20 req-76acfcc0-6fc8-45a7-990e-1843a5e60c4e service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Neutron deleted interface 8fc2eeef-dace-49f2-99aa-448810fbaa32; detaching it from the instance and deleting it from the info cache [ 989.392690] env[65758]: DEBUG nova.network.neutron [req-a9cf62ce-581b-4cd0-b4ab-390c85c5fb20 req-76acfcc0-6fc8-45a7-990e-1843a5e60c4e service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 989.401898] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.647142] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660896, 'name': RelocateVM_Task, 'duration_secs': 4.728023} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.647494] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 989.647692] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909979', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'name': 'volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ce11868-fee2-40d3-9433-7bc398a1f756', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'serial': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 989.648564] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecc6540-54af-4531-a276-be3c85209d1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.655189] env[65758]: DEBUG nova.network.neutron [-] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 989.682549] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d08c22c-9b76-4d44-bef9-0384217983b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.714989] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f/volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.716067] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e53d79ac-e487-428d-b213-893cd851e663 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.744028] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660905, 'name': PowerOffVM_Task, 'duration_secs': 0.41399} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.745958] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.746248] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Volume detach. Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 989.746461] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909994', 'volume_id': '0659d25b-87ba-47b1-be68-7ea7275165bb', 'name': 'volume-0659d25b-87ba-47b1-be68-7ea7275165bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a', 'attached_at': '', 'detached_at': '', 'volume_id': '0659d25b-87ba-47b1-be68-7ea7275165bb', 'serial': '0659d25b-87ba-47b1-be68-7ea7275165bb'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 989.746810] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 989.746810] env[65758]: value = "task-4660906" [ 989.746810] env[65758]: _type = "Task" [ 989.746810] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.747712] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d64f33-d950-4b7f-9f02-fc6bab6102f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.784661] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660906, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.784661] env[65758]: WARNING neutronclient.v2_0.client [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 989.785892] env[65758]: WARNING openstack [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 989.785892] env[65758]: WARNING openstack [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 989.794431] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a48b1f-7f4b-45df-ae7d-7230fe06081d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.810806] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b479d4-d8a0-41b0-9635-1058f2ae22fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.850557] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4747f8-c50e-4475-81e2-03794443a166 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.877422] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ce8d87de-717b-4eab-bfd1-d356b148a243 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "6981b99e-8e9f-459a-b356-9ed726c268ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.888s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.878311] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The volume has not been displaced from its original location: [datastore1] volume-0659d25b-87ba-47b1-be68-7ea7275165bb/volume-0659d25b-87ba-47b1-be68-7ea7275165bb.vmdk. No consolidation needed. 
{{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 989.883814] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 989.884416] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a5b09dc-dd5c-43ab-95e0-5a36eb986971 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.907277] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a68ffeac-3541-4084-b6ed-e7dc13c8ec46 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.923159] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cd3c22-ca77-4ff8-8631-b99ad0915d66 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.935228] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 989.935228] env[65758]: value = "task-4660907" [ 989.935228] env[65758]: _type = "Task" [ 989.935228] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.970283] env[65758]: DEBUG nova.compute.manager [req-a9cf62ce-581b-4cd0-b4ab-390c85c5fb20 req-76acfcc0-6fc8-45a7-990e-1843a5e60c4e service nova] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Detach interface failed, port_id=8fc2eeef-dace-49f2-99aa-448810fbaa32, reason: Instance a6ed7451-7b59-4ed9-8fb7-871d6107a272 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 990.096196] env[65758]: WARNING neutronclient.v2_0.client [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 990.098045] env[65758]: WARNING openstack [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 990.098045] env[65758]: WARNING openstack [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 990.113335] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "6981b99e-8e9f-459a-b356-9ed726c268ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.113710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "6981b99e-8e9f-459a-b356-9ed726c268ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.113966] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "6981b99e-8e9f-459a-b356-9ed726c268ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.114207] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "6981b99e-8e9f-459a-b356-9ed726c268ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.114365] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "6981b99e-8e9f-459a-b356-9ed726c268ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.118392] env[65758]: INFO nova.compute.manager [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Terminating instance [ 990.154391] env[65758]: INFO 
nova.compute.manager [-] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Took 1.39 seconds to deallocate network for instance. [ 990.205338] env[65758]: DEBUG nova.network.neutron [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance_info_cache with network_info: [{"id": "1a0cdbf3-b230-4f89-999a-4886f142722c", "address": "fa:16:3e:a2:d0:8b", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0cdbf3-b2", "ovs_interfaceid": "1a0cdbf3-b230-4f89-999a-4886f142722c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 990.261189] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660906, 'name': ReconfigVM_Task, 'duration_secs': 0.390611} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.262475] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Reconfigured VM instance instance-00000056 to attach disk [datastore1] volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f/volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.269497] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fc3f10a-1b53-4287-b1f6-4d97d79ba993 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.286492] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 990.286492] env[65758]: value = "task-4660908" [ 990.286492] env[65758]: _type = "Task" [ 990.286492] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.301043] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660908, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.312226] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cae09fe-b21b-4540-8f3e-dd44ea5b43ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.322865] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbc5fe4-6a06-43e0-be30-f1a2b17d1941 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.358161] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17321578-5305-4640-9143-a2ec2c5c4416 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.367926] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3059222a-d9d6-44b9-8334-c86e51dcee95 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.385846] env[65758]: DEBUG nova.compute.provider_tree [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.452947] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660907, 'name': ReconfigVM_Task, 'duration_secs': 0.347055} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.453246] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 990.458254] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b92ed5b2-cd37-4add-9e59-abccce170b8f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.477466] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 990.477466] env[65758]: value = "task-4660909" [ 990.477466] env[65758]: _type = "Task" [ 990.477466] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.489461] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660909, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.624931] env[65758]: DEBUG nova.compute.manager [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 990.625281] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 990.626232] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea4ced5-4bff-4cbe-bd75-fce057854ef4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.634455] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.635178] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa6976f4-a6ec-4538-a6d3-f6524d154acd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.645508] env[65758]: DEBUG oslo_vmware.api [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 990.645508] env[65758]: value = "task-4660910" [ 990.645508] env[65758]: _type = "Task" [ 990.645508] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.659865] env[65758]: DEBUG oslo_vmware.api [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660910, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.670193] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.709942] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-3ff9192b-3956-49f6-afd2-827759826056" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.710243] env[65758]: DEBUG nova.objects.instance [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lazy-loading 'migration_context' on Instance uuid 3ff9192b-3956-49f6-afd2-827759826056 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.800991] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660908, 'name': ReconfigVM_Task, 'duration_secs': 0.208766} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.802093] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909979', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'name': 'volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ce11868-fee2-40d3-9433-7bc398a1f756', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'serial': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 990.802818] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6c6334a-1914-4992-a68b-eddff10d5efd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.812511] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 990.812511] env[65758]: value = "task-4660911" [ 990.812511] env[65758]: _type = "Task" [ 990.812511] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.823505] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660911, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.888567] env[65758]: DEBUG nova.scheduler.client.report [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.992086] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660909, 'name': ReconfigVM_Task, 'duration_secs': 0.296285} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.992086] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909994', 'volume_id': '0659d25b-87ba-47b1-be68-7ea7275165bb', 'name': 'volume-0659d25b-87ba-47b1-be68-7ea7275165bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a', 'attached_at': '', 'detached_at': '', 'volume_id': '0659d25b-87ba-47b1-be68-7ea7275165bb', 'serial': '0659d25b-87ba-47b1-be68-7ea7275165bb'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 990.992238] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 990.993586] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527f483d-ecce-4f88-8d14-b751cff4385c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.001045] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.001343] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3cc8373-cf06-48da-87d6-56813c0413c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.081514] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 
tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.081824] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.082051] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleting the datastore file [datastore2] e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.082468] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9e38f53-1cb3-4c81-9a00-40116555fcee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.091382] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 991.091382] env[65758]: value = "task-4660913" [ 991.091382] env[65758]: _type = "Task" [ 991.091382] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.100297] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660913, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.158844] env[65758]: DEBUG oslo_vmware.api [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660910, 'name': PowerOffVM_Task, 'duration_secs': 0.23598} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.158844] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.158844] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.158844] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a96c5bf-20ca-45c0-89a0-474349961670 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.213779] env[65758]: DEBUG nova.objects.base [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Object Instance<3ff9192b-3956-49f6-afd2-827759826056> lazy-loaded attributes: info_cache,migration_context {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 991.215177] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30e4149-670b-4d72-8c21-ef11cd934819 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.237319] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b2f3afb-78a0-4038-b5c5-c7ed5fbb1a34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.246461] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.246888] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.247157] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleting the datastore file [datastore1] 6981b99e-8e9f-459a-b356-9ed726c268ed {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.249229] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d1e3fd8-88c8-4fe5-887c-ad82bd85e937 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.252087] env[65758]: DEBUG oslo_vmware.api [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 
tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 991.252087] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5240273b-aae0-96dd-8878-dbe78f8f3aae" [ 991.252087] env[65758]: _type = "Task" [ 991.252087] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.260365] env[65758]: DEBUG oslo_vmware.api [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 991.260365] env[65758]: value = "task-4660915" [ 991.260365] env[65758]: _type = "Task" [ 991.260365] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.268179] env[65758]: DEBUG oslo_vmware.api [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5240273b-aae0-96dd-8878-dbe78f8f3aae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.274884] env[65758]: DEBUG oslo_vmware.api [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.323726] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660911, 'name': Rename_Task, 'duration_secs': 0.170227} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.324174] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.324488] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf895177-1d6d-4617-aa1e-b1bc9cc25480 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.332275] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 991.332275] env[65758]: value = "task-4660916" [ 991.332275] env[65758]: _type = "Task" [ 991.332275] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.341318] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660916, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.394587] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.092s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.395160] env[65758]: DEBUG nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 991.398351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.805s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.404101] env[65758]: INFO nova.compute.claims [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.603419] env[65758]: DEBUG oslo_vmware.api [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4660913, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.383104} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.603718] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.603907] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 991.605048] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 991.605048] env[65758]: INFO nova.compute.manager [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Took 2.39 seconds to destroy the instance on the hypervisor. 
[ 991.605048] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 991.605048] env[65758]: DEBUG nova.compute.manager [-] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 991.605048] env[65758]: DEBUG nova.network.neutron [-] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 991.605048] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 991.605579] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 991.605873] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 991.675031] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 991.764430] env[65758]: DEBUG oslo_vmware.api [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5240273b-aae0-96dd-8878-dbe78f8f3aae, 'name': SearchDatastore_Task, 'duration_secs': 0.019425} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.769971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.777795] env[65758]: DEBUG oslo_vmware.api [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.406698} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.778103] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.778321] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 991.778456] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 991.778630] env[65758]: INFO nova.compute.manager [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Took 1.15 seconds to destroy the instance on the hypervisor. [ 991.779124] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 991.779212] env[65758]: DEBUG nova.compute.manager [-] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 991.779362] env[65758]: DEBUG nova.network.neutron [-] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 991.779745] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 991.780163] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 991.780423] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 991.843534] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660916, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.912386] env[65758]: DEBUG nova.compute.utils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 991.916328] env[65758]: DEBUG nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 991.916665] env[65758]: DEBUG nova.network.neutron [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 991.917190] env[65758]: WARNING neutronclient.v2_0.client [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 991.919972] env[65758]: WARNING neutronclient.v2_0.client [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 991.919972] env[65758]: WARNING openstack [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 991.919972] env[65758]: WARNING openstack [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 991.960151] env[65758]: DEBUG nova.compute.manager [req-239a2b4f-1bbc-4f3c-aa5d-a6619df8f416 req-f8da8911-2154-43e2-a86d-886bc62baea8 service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Received event network-vif-deleted-ee605185-7c6b-4822-9ed0-b866f77e3500 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 991.960151] env[65758]: INFO nova.compute.manager [req-239a2b4f-1bbc-4f3c-aa5d-a6619df8f416 req-f8da8911-2154-43e2-a86d-886bc62baea8 service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Neutron deleted interface ee605185-7c6b-4822-9ed0-b866f77e3500; detaching it from the instance and deleting it from the info cache [ 991.960764] env[65758]: DEBUG nova.network.neutron [req-239a2b4f-1bbc-4f3c-aa5d-a6619df8f416 req-f8da8911-2154-43e2-a86d-886bc62baea8 service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 991.970180] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 992.086455] env[65758]: DEBUG nova.policy [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6b7220ea9a34475879748959534988d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2440f1694fe4b87a9827f6653ff2e4c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 992.347183] env[65758]: DEBUG oslo_vmware.api [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4660916, 'name': PowerOnVM_Task, 'duration_secs': 0.74914} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.347921] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 992.348319] env[65758]: INFO nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Took 10.38 seconds to spawn the instance on the hypervisor. [ 992.348633] env[65758]: DEBUG nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 992.349639] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9a1e1a-5172-484c-88e7-e2e88ade2090 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.408161] env[65758]: DEBUG nova.compute.manager [req-e33f5cfc-d693-45d7-acee-256f2d4e6f8f req-d3d3cf70-219d-40ca-a2b5-45353337fb44 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Received event network-vif-deleted-91228388-8d10-417b-b79f-c45873d878e3 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 992.408389] env[65758]: INFO nova.compute.manager [req-e33f5cfc-d693-45d7-acee-256f2d4e6f8f req-d3d3cf70-219d-40ca-a2b5-45353337fb44 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Neutron deleted interface 91228388-8d10-417b-b79f-c45873d878e3; detaching it from the instance and deleting it from the info cache [ 992.408711] env[65758]: DEBUG nova.network.neutron [req-e33f5cfc-d693-45d7-acee-256f2d4e6f8f req-d3d3cf70-219d-40ca-a2b5-45353337fb44 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 992.429305] env[65758]: DEBUG nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 992.433782] env[65758]: DEBUG nova.network.neutron [-] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 992.464773] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4078bec-4d4d-4184-8769-e6d0a0d03b1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.477921] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d04f34-c2f4-4b91-b4e7-85a05900a63f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.532738] env[65758]: DEBUG nova.compute.manager [req-239a2b4f-1bbc-4f3c-aa5d-a6619df8f416 req-f8da8911-2154-43e2-a86d-886bc62baea8 service nova] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Detach interface failed, port_id=ee605185-7c6b-4822-9ed0-b866f77e3500, reason: Instance e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 992.651444] env[65758]: DEBUG nova.network.neutron [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Successfully created port: 2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 992.848337] env[65758]: DEBUG nova.network.neutron [-] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 992.857614] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fc82e1-7fa0-4f19-834e-e4b1bda741f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.877142] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bd9b01-df19-4bed-ae4e-23d388bdce90 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.882782] env[65758]: INFO nova.compute.manager [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Took 27.44 seconds to build instance. 
[ 992.915644] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4383127c-15aa-4b65-b1a2-783b1c4f07d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.916927] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ca0c24-2328-4540-9db7-668e821f731e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.928317] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccc3efc-e41b-4b7a-9fa2-cc12cf88892f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.938126] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fa0c3f-a7df-4e91-9d98-660531e86b50 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.953940] env[65758]: INFO nova.compute.manager [-] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Took 1.35 seconds to deallocate network for instance. [ 992.968196] env[65758]: DEBUG nova.compute.provider_tree [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.997221] env[65758]: DEBUG nova.compute.manager [req-e33f5cfc-d693-45d7-acee-256f2d4e6f8f req-d3d3cf70-219d-40ca-a2b5-45353337fb44 service nova] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Detach interface failed, port_id=91228388-8d10-417b-b79f-c45873d878e3, reason: Instance 6981b99e-8e9f-459a-b356-9ed726c268ed could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 993.352363] env[65758]: INFO nova.compute.manager [-] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Took 1.57 seconds to deallocate network for instance. [ 993.385755] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9180c1e8-4abc-47a0-9a94-e554b4d68f84 tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "0ce11868-fee2-40d3-9433-7bc398a1f756" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.489s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.471234] env[65758]: DEBUG nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 993.474698] env[65758]: DEBUG nova.scheduler.client.report [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.509728] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 993.509991] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 993.510396] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 993.510673] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 993.510825] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 993.510966] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:438}} [ 993.511185] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 993.511414] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 993.511584] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 993.511744] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 993.511949] env[65758]: DEBUG nova.virt.hardware [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 993.513185] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c927ed-42d5-467c-b93f-fe78eef3b009 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.523968] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fe4582-dcc6-41bf-9eba-2e458ffd264a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.531027] env[65758]: INFO nova.compute.manager [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Took 0.57 seconds to detach 1 volumes for instance. 
[ 993.858871] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.979988] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.980552] env[65758]: DEBUG nova.compute.manager [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 993.983445] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.740s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.985726] env[65758]: INFO nova.compute.claims [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 994.046093] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.204196] env[65758]: DEBUG nova.compute.manager [req-ccf380ef-d5c2-4039-95a6-f7f4c4f23c92 req-ba5ec8f3-adf9-4724-8db4-3df9f6a680bc service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received event network-vif-plugged-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 994.204488] env[65758]: DEBUG oslo_concurrency.lockutils [req-ccf380ef-d5c2-4039-95a6-f7f4c4f23c92 req-ba5ec8f3-adf9-4724-8db4-3df9f6a680bc service nova] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.204777] env[65758]: DEBUG oslo_concurrency.lockutils [req-ccf380ef-d5c2-4039-95a6-f7f4c4f23c92 req-ba5ec8f3-adf9-4724-8db4-3df9f6a680bc service nova] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.204816] env[65758]: DEBUG oslo_concurrency.lockutils [req-ccf380ef-d5c2-4039-95a6-f7f4c4f23c92 req-ba5ec8f3-adf9-4724-8db4-3df9f6a680bc service nova] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.204981] env[65758]: DEBUG nova.compute.manager [req-ccf380ef-d5c2-4039-95a6-f7f4c4f23c92 req-ba5ec8f3-adf9-4724-8db4-3df9f6a680bc service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] No waiting events found dispatching network-vif-plugged-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 994.205262] env[65758]: WARNING nova.compute.manager [req-ccf380ef-d5c2-4039-95a6-f7f4c4f23c92 req-ba5ec8f3-adf9-4724-8db4-3df9f6a680bc service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received unexpected event network-vif-plugged-2adc4687-14f6-4742-8afd-a86473befd61 for instance with vm_state building and task_state spawning. [ 994.289609] env[65758]: DEBUG nova.network.neutron [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Successfully updated port: 2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 994.444455] env[65758]: DEBUG nova.compute.manager [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Received event network-changed-f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 994.444720] env[65758]: DEBUG nova.compute.manager [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Refreshing instance network info cache due to event network-changed-f7953062-77ac-411d-9809-b817fca06bbb. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 994.444961] env[65758]: DEBUG oslo_concurrency.lockutils [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] Acquiring lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.445265] env[65758]: DEBUG oslo_concurrency.lockutils [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] Acquired lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.445476] env[65758]: DEBUG nova.network.neutron [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Refreshing network info cache for port f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 994.492401] env[65758]: DEBUG nova.compute.utils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 994.495831] env[65758]: DEBUG nova.compute.manager [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 994.496079] env[65758]: DEBUG nova.network.neutron [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 994.499524] env[65758]: WARNING neutronclient.v2_0.client [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 994.499908] env[65758]: WARNING neutronclient.v2_0.client [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
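The lockutils entries above and below follow one pattern: a named lock such as "refresh_cache-<instance uuid>" or "compute_resources" is acquired, the critical section runs (refreshing the network info cache, claiming resources), and the lock is released along with how long it waited or was held. A minimal sketch of that pattern with oslo.concurrency (illustrative only; the instance UUID is copied from the log, and the bodies of the critical sections are placeholders) is:

```python
# Illustrative use of oslo.concurrency named locks, mirroring the
# Acquiring/Acquired/Releasing lock messages in the log above.
from oslo_concurrency import lockutils

INSTANCE_UUID = '0ce11868-fee2-40d3-9433-7bc398a1f756'  # example from the log

def refresh_network_info_cache(instance_uuid):
    # lockutils.lock() is a context manager; entering and leaving it produces
    # the "Acquiring lock" / "Acquired lock" / "Releasing lock" debug lines.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # ... fetch network info from Neutron and update the cache (placeholder) ...
        pass

# The same idea as a decorator, as used for the "compute_resources" lock;
# it reports the time waited for and held, as in the log entries above.
@lockutils.synchronized('compute_resources')
def update_usage():
    # ... adjust the resource tracker's view of claimed resources (placeholder) ...
    pass
```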
[ 994.500525] env[65758]: WARNING openstack [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 994.500901] env[65758]: WARNING openstack [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 994.516336] env[65758]: DEBUG nova.compute.manager [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 994.595712] env[65758]: DEBUG nova.policy [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '611e0700106a462bbc20b7026c325d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eaa7a78affb743fe9a31cb24f537f30c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 994.792912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.792912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.792912] env[65758]: DEBUG nova.network.neutron [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 994.953443] env[65758]: WARNING neutronclient.v2_0.client [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
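The repeated "Disabling service 'block-storage' / 'key-manager'" warnings above stem from the SDK reading config options that were never registered: asking an oslo.config group for an unregistered option raises NoSuchOptError, which the SDK catches and reports by disabling that service. A tiny reproduction of the underlying error (illustrative; the group and option names are taken from the warning text) is:

```python
# Reproduces the oslo_config.cfg.NoSuchOptError seen in the warnings above:
# reading an option that was never registered on a config group.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))  # the group exists...
conf([])                                     # ...but no 'valid_interfaces' option was registered

try:
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    print(exc)  # "no such option valid_interfaces in group [cinder]"
```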
[ 994.954660] env[65758]: WARNING openstack [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 994.954660] env[65758]: WARNING openstack [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 995.007817] env[65758]: DEBUG nova.network.neutron [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Successfully created port: e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 995.029583] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06eb1e1-f8f6-4097-9375-8a55ff01281d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.040487] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e2c72d-77b9-4885-85f8-4e1eef3c4911 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.082310] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d556bd-6180-46ab-add0-3446c0b5e5ef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.094254] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c181d7e9-e8a0-4763-8dbc-ccc6cfb14efd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.110287] env[65758]: DEBUG nova.compute.provider_tree [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.156136] env[65758]: WARNING neutronclient.v2_0.client [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
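The provider_tree and scheduler report entries here and further below repeatedly note that "Inventory has not changed" for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51: the freshly computed inventory is compared against what is already recorded, and an update is only pushed when the two differ. A minimal sketch of that decision (illustrative; the inventory dict is copied from the log, and the update callable is a placeholder) is:

```python
# Mirrors the "Inventory has not changed for provider ..." decision in the log:
# only push an inventory update when the computed data differs from the cache.
def maybe_update_inventory(cached, computed, push_update):
    if cached == computed:
        print('Inventory has not changed')  # skip the update call
        return False
    push_update(computed)
    return True

computed = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95,
                'step_size': 1, 'allocation_ratio': 1.0},
}
maybe_update_inventory(cached=computed, computed=computed,
                       push_update=lambda inv: None)
```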
[ 995.159348] env[65758]: WARNING openstack [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 995.159348] env[65758]: WARNING openstack [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 995.252583] env[65758]: DEBUG nova.network.neutron [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Updated VIF entry in instance network info cache for port f7953062-77ac-411d-9809-b817fca06bbb. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 995.253380] env[65758]: DEBUG nova.network.neutron [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Updating instance_info_cache with network_info: [{"id": "f7953062-77ac-411d-9809-b817fca06bbb", "address": "fa:16:3e:59:27:00", "network": {"id": "7d47c1f7-d953-4e8d-ae38-58ba035ee5ba", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-335033115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3efa562362e94a48851ef7efa8c35123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7953062-77", "ovs_interfaceid": "f7953062-77ac-411d-9809-b817fca06bbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 995.296464] env[65758]: WARNING openstack [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 995.296899] env[65758]: WARNING openstack [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception 
attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 995.334344] env[65758]: DEBUG nova.network.neutron [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 995.392890] env[65758]: WARNING neutronclient.v2_0.client [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 995.393623] env[65758]: WARNING openstack [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 995.393992] env[65758]: WARNING openstack [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 995.470518] env[65758]: DEBUG nova.network.neutron [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [{"id": "2adc4687-14f6-4742-8afd-a86473befd61", "address": "fa:16:3e:63:9e:d9", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc4687-14", "ovs_interfaceid": "2adc4687-14f6-4742-8afd-a86473befd61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 995.529673] env[65758]: DEBUG nova.compute.manager [None 
req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 995.556447] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 995.556688] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 995.556842] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 995.557032] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.557184] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 995.557330] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 995.557533] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 995.557694] env[65758]: DEBUG 
nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 995.557856] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 995.558024] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 995.558206] env[65758]: DEBUG nova.virt.hardware [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 995.559144] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19fa5c1-d086-4a17-b608-88347e85d7b3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.567913] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03126e57-7391-4f0e-9698-5e2637a24b8c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.613209] env[65758]: DEBUG nova.scheduler.client.report [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.756383] env[65758]: DEBUG oslo_concurrency.lockutils [req-ec7e853a-3cc0-421f-83f1-9209f362f25b req-65ab9a48-ca6d-46dc-8271-5f5e3d76b6df service nova] Releasing lock "refresh_cache-0ce11868-fee2-40d3-9433-7bc398a1f756" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.973343] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.975981] env[65758]: DEBUG nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 
tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Instance network_info: |[{"id": "2adc4687-14f6-4742-8afd-a86473befd61", "address": "fa:16:3e:63:9e:d9", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc4687-14", "ovs_interfaceid": "2adc4687-14f6-4742-8afd-a86473befd61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 995.975981] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:9e:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2adc4687-14f6-4742-8afd-a86473befd61', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.983378] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating folder: Project (e2440f1694fe4b87a9827f6653ff2e4c). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.983786] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a32561d-292d-419e-ac6b-791a6808effc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.000390] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Created folder: Project (e2440f1694fe4b87a9827f6653ff2e4c) in parent group-v909763. [ 996.000773] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating folder: Instances. Parent ref: group-v909998. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 996.001201] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bd6a302-3075-4528-b6c4-478e9fd06cf0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.014463] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Created folder: Instances in parent group-v909998. [ 996.014877] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 996.015129] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 996.015375] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-030d1475-9da2-486f-a5d2-6543285d1f1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.039145] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 996.039145] env[65758]: value = "task-4660919" [ 996.039145] env[65758]: _type = "Task" [ 996.039145] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.052903] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660919, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.118530] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.135s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.119307] env[65758]: DEBUG nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 996.123770] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.722s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.126160] env[65758]: INFO nova.compute.claims [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.229230] env[65758]: DEBUG nova.compute.manager [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received event network-changed-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 996.229443] env[65758]: DEBUG nova.compute.manager [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Refreshing instance network info cache due to event network-changed-2adc4687-14f6-4742-8afd-a86473befd61. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 996.229700] env[65758]: DEBUG oslo_concurrency.lockutils [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] Acquiring lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.229857] env[65758]: DEBUG oslo_concurrency.lockutils [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] Acquired lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.230501] env[65758]: DEBUG nova.network.neutron [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Refreshing network info cache for port 2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 996.525435] env[65758]: DEBUG nova.network.neutron [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Successfully updated port: e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 996.551157] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660919, 'name': CreateVM_Task, 'duration_secs': 0.397708} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.552021] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 996.552021] env[65758]: WARNING neutronclient.v2_0.client [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 996.552786] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.552980] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.553927] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 996.554228] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-230850e1-ee82-44c8-bd45-c0e8eaab771a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.561378] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 996.561378] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52665538-9bb2-84ba-4c3e-ef6ae11d25c7" [ 996.561378] env[65758]: _type = "Task" [ 996.561378] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.570321] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52665538-9bb2-84ba-4c3e-ef6ae11d25c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.632303] env[65758]: DEBUG nova.compute.utils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 996.636621] env[65758]: DEBUG nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 996.636834] env[65758]: DEBUG nova.network.neutron [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 996.638159] env[65758]: WARNING neutronclient.v2_0.client [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 996.638159] env[65758]: WARNING neutronclient.v2_0.client [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
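The CreateVM_Task and SearchDatastore_Task entries above show the driver submitting a vCenter task and then polling it ("progress is 0%") until it reports success. A simplified, generic polling loop in the spirit of that wait_for_task sequence (an illustration, not oslo.vmware's actual implementation; the task-info callable, states, and intervals are assumptions) is:

```python
# Simplified stand-in for the "Waiting for the task ... to complete" /
# "progress is N%" / "completed successfully" sequence in the log above.
# get_task_info() is a placeholder, not an oslo.vmware API.
import time

class TaskTimeout(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()  # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Still queued or running: report progress and try again shortly.
        print('progress is %s%%' % info.get('progress', 0))
        time.sleep(poll_interval)
    raise TaskTimeout('task did not complete within %s seconds' % timeout)
```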
[ 996.638159] env[65758]: WARNING openstack [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 996.638547] env[65758]: WARNING openstack [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 996.702412] env[65758]: DEBUG nova.policy [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 996.736817] env[65758]: WARNING neutronclient.v2_0.client [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
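The instance_info_cache dumps earlier (and again below) carry the full network_info structure for each port: one entry per VIF, each with a nested network/subnets/ips layout. A short sketch of walking that structure to pull out the fixed and floating addresses (illustrative only; it assumes exactly the shape shown in those dumps) is:

```python
# Walks a network_info list shaped like the instance_info_cache dumps in the
# log and collects (port id, fixed address, floating addresses) per VIF.
def summarize_network_info(network_info):
    summary = []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                floating = [f['address'] for f in ip.get('floating_ips', [])]
                summary.append((vif['id'], ip['address'], floating))
    return summary

# Example using the shape (not the full content) of the cached entry for
# port 2adc4687-14f6-4742-8afd-a86473befd61 above:
network_info = [{
    'id': '2adc4687-14f6-4742-8afd-a86473befd61',
    'network': {'subnets': [{'ips': [{'address': '192.168.128.7',
                                      'floating_ips': []}]}]},
}]
print(summarize_network_info(network_info))
# -> [('2adc4687-14f6-4742-8afd-a86473befd61', '192.168.128.7', [])]
```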
[ 996.736817] env[65758]: WARNING openstack [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 996.736817] env[65758]: WARNING openstack [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 997.011240] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02705546-784c-41f7-ac6c-a4f10e26498a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.021065] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4994c854-ac1d-47f7-9a68-0999f4434a36 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.028015] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquiring lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.028394] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquired lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.028675] env[65758]: DEBUG nova.network.neutron [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 997.073343] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8b253e-e3ba-429a-b4b3-9121cc3300b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.086663] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c43c45-4e22-45e0-bb43-2be2b85b2a63 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.090754] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52665538-9bb2-84ba-4c3e-ef6ae11d25c7, 'name': SearchDatastore_Task, 'duration_secs': 0.032536} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.091928] env[65758]: WARNING neutronclient.v2_0.client [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 997.092548] env[65758]: WARNING openstack [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 997.092899] env[65758]: WARNING openstack [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 997.101071] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.101325] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.101556] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.101697] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.101872] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.102661] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e592c46-15bc-4e09-a866-b942c6e24f77 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.114043] env[65758]: DEBUG nova.compute.provider_tree [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.117469] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.117820] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.118825] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8db4f67b-8eac-49e9-9bce-e58331aa5900 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.125093] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 997.125093] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ff0b13-0fab-f088-f7d6-1f29e5b50599" [ 997.125093] env[65758]: _type = "Task" [ 997.125093] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.130335] env[65758]: DEBUG nova.network.neutron [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Successfully created port: e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 997.143022] env[65758]: DEBUG nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 997.145464] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ff0b13-0fab-f088-f7d6-1f29e5b50599, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.475562] env[65758]: DEBUG nova.network.neutron [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updated VIF entry in instance network info cache for port 2adc4687-14f6-4742-8afd-a86473befd61. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 997.475954] env[65758]: DEBUG nova.network.neutron [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [{"id": "2adc4687-14f6-4742-8afd-a86473befd61", "address": "fa:16:3e:63:9e:d9", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc4687-14", "ovs_interfaceid": "2adc4687-14f6-4742-8afd-a86473befd61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 997.533264] env[65758]: WARNING openstack [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 997.533803] env[65758]: WARNING openstack [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 997.620459] env[65758]: DEBUG nova.scheduler.client.report [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 997.635384] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': 
session[52f282ba-8d16-d852-9890-43f0b19795c3]52ff0b13-0fab-f088-f7d6-1f29e5b50599, 'name': SearchDatastore_Task, 'duration_secs': 0.037916} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.636406] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac93903b-9e83-4f57-b7e9-69b605b7815f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.645513] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 997.645513] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525170ff-2e27-8a8c-dad9-92f77f645459" [ 997.645513] env[65758]: _type = "Task" [ 997.645513] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.661566] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525170ff-2e27-8a8c-dad9-92f77f645459, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.684019] env[65758]: DEBUG nova.network.neutron [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 997.979246] env[65758]: DEBUG oslo_concurrency.lockutils [req-9b9a5c63-dff3-45ed-9a96-b9d5917a1803 req-d862b15f-31a2-4d06-8548-df90648d164b service nova] Releasing lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.129585] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.003s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.129585] env[65758]: DEBUG nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 998.132185] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.462s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.132642] env[65758]: DEBUG nova.objects.instance [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lazy-loading 'resources' on Instance uuid a6ed7451-7b59-4ed9-8fb7-871d6107a272 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.160672] env[65758]: DEBUG nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 998.163198] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525170ff-2e27-8a8c-dad9-92f77f645459, 'name': SearchDatastore_Task, 'duration_secs': 0.027345} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.164185] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.164812] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24/afc1eb16-c275-4b3b-a7fe-9938d2241e24.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.165248] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f03b801e-5f66-414e-ab75-0fa6faac5abc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.177026] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 998.177026] env[65758]: value = "task-4660920" [ 998.177026] env[65758]: _type = "Task" [ 998.177026] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.186825] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 998.193869] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 998.194531] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 998.194855] env[65758]: DEBUG nova.virt.hardware [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 998.195802] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9f471c-638c-4ef7-8a29-069656e7fd1c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.204543] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996a6f3d-cdd2-4536-bf4d-c0421bc78410 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.637560] env[65758]: DEBUG nova.compute.utils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 998.643442] env[65758]: DEBUG nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 998.647181] env[65758]: DEBUG nova.network.neutron [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 998.647181] env[65758]: WARNING neutronclient.v2_0.client [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 998.647181] env[65758]: WARNING neutronclient.v2_0.client [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 998.647181] env[65758]: WARNING openstack [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 998.647181] env[65758]: WARNING openstack [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 998.697161] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660920, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.744212] env[65758]: DEBUG nova.network.neutron [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Successfully updated port: e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 998.912964] env[65758]: DEBUG nova.policy [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb6583c78fea4a38b307eeade569e89f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b57a558b5f2b410e8d91d07056bf997f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 998.937843] env[65758]: WARNING neutronclient.v2_0.client [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 998.938320] env[65758]: WARNING openstack [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 998.938705] env[65758]: WARNING openstack [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 999.012764] env[65758]: DEBUG nova.compute.manager [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Received event network-vif-plugged-e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 999.012996] env[65758]: DEBUG oslo_concurrency.lockutils [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Acquiring lock "37bae4b3-6959-4f44-8600-26a4f859103c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.013767] env[65758]: DEBUG oslo_concurrency.lockutils [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Lock "37bae4b3-6959-4f44-8600-26a4f859103c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.013981] env[65758]: DEBUG oslo_concurrency.lockutils [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Lock "37bae4b3-6959-4f44-8600-26a4f859103c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.014177] env[65758]: DEBUG nova.compute.manager [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] No waiting events found dispatching network-vif-plugged-e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 999.014622] env[65758]: WARNING nova.compute.manager [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Received unexpected event network-vif-plugged-e953f008-edba-4efb-8764-649f24572836 for instance with vm_state building and task_state spawning. 
[ 999.014879] env[65758]: DEBUG nova.compute.manager [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Received event network-changed-e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 999.015504] env[65758]: DEBUG nova.compute.manager [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Refreshing instance network info cache due to event network-changed-e953f008-edba-4efb-8764-649f24572836. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 999.015504] env[65758]: DEBUG oslo_concurrency.lockutils [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Acquiring lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.053045] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f5ad70-1d84-428a-b9e9-523f01eb95ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.062326] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf78132-7924-4e2a-a1b4-dbe9d555eafa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.094568] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f6943a-0149-4688-8305-0bb6ed55e8c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.104516] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12a2da2-4f35-480d-a96c-aa0b90b90360 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.119437] env[65758]: DEBUG nova.compute.provider_tree [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.143775] env[65758]: DEBUG nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 999.192086] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731362} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.192086] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24/afc1eb16-c275-4b3b-a7fe-9938d2241e24.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 999.192086] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 999.192086] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d2d5231-d3e1-4a55-b7eb-efe7b7a97847 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.200744] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 999.200744] env[65758]: value = "task-4660921" [ 999.200744] env[65758]: _type = "Task" [ 999.200744] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.210202] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660921, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.247180] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.247411] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.247788] env[65758]: DEBUG nova.network.neutron [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 999.382332] env[65758]: DEBUG nova.network.neutron [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Successfully created port: 246d47d4-7e93-44b1-8daa-1bab668be0e5 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 999.416388] env[65758]: DEBUG nova.network.neutron [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Updating instance_info_cache with network_info: [{"id": "e953f008-edba-4efb-8764-649f24572836", "address": "fa:16:3e:29:3d:28", "network": {"id": "d166daf6-9504-46bc-a105-277a3ed6341e", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1828106829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaa7a78affb743fe9a31cb24f537f30c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape953f008-ed", "ovs_interfaceid": "e953f008-edba-4efb-8764-649f24572836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 999.623735] env[65758]: DEBUG nova.scheduler.client.report [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.711621] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660921, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071557} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.713127] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 999.715589] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d252e073-7b66-45e5-becf-0f7f6b11ece6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.741696] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24/afc1eb16-c275-4b3b-a7fe-9938d2241e24.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.742478] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b1019f5-7a8d-400d-8996-37c8dcd4b0ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.758363] env[65758]: WARNING openstack [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 999.759026] env[65758]: WARNING openstack [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 999.773111] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 999.773111] env[65758]: value = "task-4660922" [ 999.773111] env[65758]: _type = "Task" 
[ 999.773111] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.783792] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660922, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.828192] env[65758]: DEBUG nova.network.neutron [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 999.919647] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Releasing lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.920094] env[65758]: DEBUG nova.compute.manager [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Instance network_info: |[{"id": "e953f008-edba-4efb-8764-649f24572836", "address": "fa:16:3e:29:3d:28", "network": {"id": "d166daf6-9504-46bc-a105-277a3ed6341e", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1828106829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaa7a78affb743fe9a31cb24f537f30c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape953f008-ed", "ovs_interfaceid": "e953f008-edba-4efb-8764-649f24572836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 999.920796] env[65758]: DEBUG oslo_concurrency.lockutils [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Acquired lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.920796] env[65758]: DEBUG nova.network.neutron [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Refreshing network info cache for port e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 999.922119] env[65758]: 
DEBUG nova.virt.vmwareapi.vmops [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:3d:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6fab536-1e48-4d07-992a-076f0e6d089c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e953f008-edba-4efb-8764-649f24572836', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.931763] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Creating folder: Project (eaa7a78affb743fe9a31cb24f537f30c). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.932055] env[65758]: WARNING neutronclient.v2_0.client [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 999.933394] env[65758]: WARNING openstack [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 999.933784] env[65758]: WARNING openstack [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 999.941374] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b65ee83f-569d-4b49-b6e5-9345bf25ac09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.957164] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Created folder: Project (eaa7a78affb743fe9a31cb24f537f30c) in parent group-v909763. [ 999.957460] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Creating folder: Instances. Parent ref: group-v910001. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.957788] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f1d11a4-91d1-4e78-9abe-e2360109df0a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.972943] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Created folder: Instances in parent group-v910001. [ 999.973259] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 999.973597] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.973753] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8bca386-1e17-45f0-9b43-177e2c6921b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.995990] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.995990] env[65758]: value = "task-4660925" [ 999.995990] env[65758]: _type = "Task" [ 999.995990] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.005729] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660925, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.131967] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.135228] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.366s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.148788] env[65758]: WARNING neutronclient.v2_0.client [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1000.149617] env[65758]: WARNING openstack [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1000.149905] env[65758]: WARNING openstack [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1000.162350] env[65758]: DEBUG nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1000.166726] env[65758]: INFO nova.scheduler.client.report [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted allocations for instance a6ed7451-7b59-4ed9-8fb7-871d6107a272 [ 1000.195756] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1000.195756] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1000.195756] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1000.195756] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] 
Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1000.195756] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1000.195756] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1000.196201] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1000.196201] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1000.196201] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1000.196374] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1000.196553] env[65758]: DEBUG nova.virt.hardware [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1000.197491] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb54222-4b6d-4981-82b7-8fc8aec38256 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.207073] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ee4db1-6a31-4211-b329-e5a07ccc0104 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.289950] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660922, 'name': ReconfigVM_Task, 'duration_secs': 0.379985} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.290505] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Reconfigured VM instance instance-00000057 to attach disk [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24/afc1eb16-c275-4b3b-a7fe-9938d2241e24.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.291285] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f024ec36-0f08-4165-b03f-ff3284beac58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.301442] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1000.301442] env[65758]: value = "task-4660926" [ 1000.301442] env[65758]: _type = "Task" [ 1000.301442] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.313490] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660926, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.439713] env[65758]: DEBUG nova.network.neutron [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1000.509146] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660925, 'name': CreateVM_Task, 'duration_secs': 0.405476} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.509443] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.514471] env[65758]: WARNING neutronclient.v2_0.client [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1000.515693] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.515693] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.515693] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1000.515693] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e3a1221-f18d-4e3e-a950-94ac1206cb29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.522628] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1000.522628] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524a8ca2-dc26-7d42-a272-5834021c723a" [ 1000.522628] env[65758]: _type = "Task" [ 1000.522628] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.532203] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a8ca2-dc26-7d42-a272-5834021c723a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.550935] env[65758]: WARNING neutronclient.v2_0.client [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1000.552017] env[65758]: WARNING openstack [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1000.552658] env[65758]: WARNING openstack [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1000.678966] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6f0f2cf7-dab4-4e1b-b83a-b4b7e04de41c tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "a6ed7451-7b59-4ed9-8fb7-871d6107a272" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.578s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.684155] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.684155] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.731108] env[65758]: DEBUG nova.network.neutron [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Updated VIF entry in instance network info cache for port e953f008-edba-4efb-8764-649f24572836. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1000.731108] env[65758]: DEBUG nova.network.neutron [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Updating instance_info_cache with network_info: [{"id": "e953f008-edba-4efb-8764-649f24572836", "address": "fa:16:3e:29:3d:28", "network": {"id": "d166daf6-9504-46bc-a105-277a3ed6341e", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1828106829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaa7a78affb743fe9a31cb24f537f30c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape953f008-ed", "ovs_interfaceid": "e953f008-edba-4efb-8764-649f24572836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1000.814045] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660926, 'name': Rename_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.941357] env[65758]: DEBUG nova.compute.manager [req-aef86c7d-d147-4a0c-9463-7342cfb30488 req-3e1fecba-a9be-492b-96dd-5e80b7c9f612 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Received event network-vif-plugged-246d47d4-7e93-44b1-8daa-1bab668be0e5 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1000.941357] env[65758]: DEBUG oslo_concurrency.lockutils [req-aef86c7d-d147-4a0c-9463-7342cfb30488 req-3e1fecba-a9be-492b-96dd-5e80b7c9f612 service nova] Acquiring lock "95509bbe-5aaf-471f-97b3-8a3085797568-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.941357] env[65758]: DEBUG oslo_concurrency.lockutils [req-aef86c7d-d147-4a0c-9463-7342cfb30488 req-3e1fecba-a9be-492b-96dd-5e80b7c9f612 service nova] Lock "95509bbe-5aaf-471f-97b3-8a3085797568-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.941357] env[65758]: DEBUG oslo_concurrency.lockutils [req-aef86c7d-d147-4a0c-9463-7342cfb30488 req-3e1fecba-a9be-492b-96dd-5e80b7c9f612 service nova] Lock "95509bbe-5aaf-471f-97b3-8a3085797568-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.941357] env[65758]: DEBUG nova.compute.manager [req-aef86c7d-d147-4a0c-9463-7342cfb30488 req-3e1fecba-a9be-492b-96dd-5e80b7c9f612 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] No waiting events found dispatching network-vif-plugged-246d47d4-7e93-44b1-8daa-1bab668be0e5 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1000.941357] env[65758]: WARNING nova.compute.manager [req-aef86c7d-d147-4a0c-9463-7342cfb30488 req-3e1fecba-a9be-492b-96dd-5e80b7c9f612 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Received unexpected event network-vif-plugged-246d47d4-7e93-44b1-8daa-1bab668be0e5 for instance with vm_state building and task_state spawning. 
[ 1000.949021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.949021] env[65758]: DEBUG nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Instance network_info: |[{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1000.949021] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:62:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e91b61d1-ee47-49e8-a302-26b7b0725dff', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1000.956175] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1000.959877] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1000.960584] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94c77a5a-34bf-4384-b2fc-2f539a29a22a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.995566] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1000.995566] env[65758]: value = "task-4660927" [ 1000.995566] env[65758]: _type = "Task" [ 1000.995566] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.010737] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660927, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.015177] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.015415] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.036307] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a8ca2-dc26-7d42-a272-5834021c723a, 'name': SearchDatastore_Task, 'duration_secs': 0.02074} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.039623] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.040302] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.040561] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.040706] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.040884] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.042027] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e4f2cad-cb3b-41dc-93a6-694c6cbbdd2e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.058678] env[65758]: DEBUG nova.network.neutron [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Successfully updated port: 246d47d4-7e93-44b1-8daa-1bab668be0e5 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1001.064576] env[65758]: DEBUG nova.compute.manager [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-vif-plugged-e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1001.064874] env[65758]: DEBUG oslo_concurrency.lockutils [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.065109] env[65758]: DEBUG oslo_concurrency.lockutils [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.065586] env[65758]: DEBUG oslo_concurrency.lockutils [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.065946] env[65758]: DEBUG nova.compute.manager [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] No waiting events found dispatching network-vif-plugged-e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1001.066260] env[65758]: WARNING nova.compute.manager [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received unexpected event network-vif-plugged-e91b61d1-ee47-49e8-a302-26b7b0725dff for instance with vm_state building and task_state spawning. [ 1001.066845] env[65758]: DEBUG nova.compute.manager [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-changed-e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1001.067211] env[65758]: DEBUG nova.compute.manager [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Refreshing instance network info cache due to event network-changed-e91b61d1-ee47-49e8-a302-26b7b0725dff. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1001.067598] env[65758]: DEBUG oslo_concurrency.lockutils [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Acquiring lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.067598] env[65758]: DEBUG oslo_concurrency.lockutils [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Acquired lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.068135] env[65758]: DEBUG nova.network.neutron [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Refreshing network info cache for port e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1001.075063] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.075382] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1001.077662] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20fa34f1-92d1-42a8-9c65-9fb809583e0b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.085167] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1001.085167] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52befb61-545c-3eb1-b1c7-dc6635ed1b5a" [ 1001.085167] env[65758]: _type = "Task" [ 1001.085167] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.098178] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52befb61-545c-3eb1-b1c7-dc6635ed1b5a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.104734] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57053276-2681-4613-8883-dd2d81fae708 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.113443] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a90cad-a336-42cd-8c1f-309d66e2d48f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.156717] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c884fcf3-c036-4579-9701-9fc6ba357d5b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.167982] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc31fdc-e92b-49f1-ab41-cacac5c89150 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.193008] env[65758]: DEBUG nova.compute.provider_tree [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.195252] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.195252] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.195429] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.195476] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.195712] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.195756] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.200021] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1001.200021] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.236212] env[65758]: DEBUG oslo_concurrency.lockutils [req-24d69fda-1a04-4d18-8909-f2c5d4f99588 req-724aa8e7-323b-45bf-a7b8-5f72717e7d6f service nova] Releasing lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.313900] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660926, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.510020] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660927, 'name': CreateVM_Task, 'duration_secs': 0.484004} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.510182] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1001.511855] env[65758]: WARNING neutronclient.v2_0.client [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1001.511855] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.511855] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.515046] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1001.515046] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b04c7fc4-03ed-449d-ba31-99c2d255832c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.518380] env[65758]: DEBUG nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1001.521585] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1001.521585] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52033e87-bb7a-e506-9282-037dddbb1485" [ 1001.521585] env[65758]: _type = "Task" [ 1001.521585] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.531752] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52033e87-bb7a-e506-9282-037dddbb1485, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.561278] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquiring lock "refresh_cache-95509bbe-5aaf-471f-97b3-8a3085797568" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.561447] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquired lock "refresh_cache-95509bbe-5aaf-471f-97b3-8a3085797568" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.561626] env[65758]: DEBUG nova.network.neutron [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1001.577242] env[65758]: WARNING neutronclient.v2_0.client [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1001.577945] env[65758]: WARNING openstack [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1001.578319] env[65758]: WARNING openstack [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1001.596936] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52befb61-545c-3eb1-b1c7-dc6635ed1b5a, 'name': SearchDatastore_Task, 'duration_secs': 0.013416} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.597248] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c6502c4-6e95-4191-9e95-0b0f0bf51c30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.604369] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1001.604369] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c88a4c-5f08-47d9-5814-9fe9b2b053b7" [ 1001.604369] env[65758]: _type = "Task" [ 1001.604369] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.612912] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c88a4c-5f08-47d9-5814-9fe9b2b053b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.657716] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "1ff48e58-9240-466d-bec4-51394e550c34" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.658101] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "1ff48e58-9240-466d-bec4-51394e550c34" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.658440] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "1ff48e58-9240-466d-bec4-51394e550c34-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.658691] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "1ff48e58-9240-466d-bec4-51394e550c34-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.658951] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "1ff48e58-9240-466d-bec4-51394e550c34-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.661724] env[65758]: INFO nova.compute.manager [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Terminating instance [ 1001.702512] env[65758]: DEBUG nova.scheduler.client.report [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.706477] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.805084] env[65758]: WARNING neutronclient.v2_0.client [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1001.805785] env[65758]: WARNING openstack [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1001.806368] env[65758]: WARNING openstack [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1001.827975] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660926, 'name': Rename_Task, 'duration_secs': 1.184238} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.828541] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.828930] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a00db92-4ed5-438c-bc72-81c26c0eefc0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.841681] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1001.841681] env[65758]: value = "task-4660928" [ 1001.841681] env[65758]: _type = "Task" [ 1001.841681] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.855943] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660928, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.936505] env[65758]: DEBUG nova.network.neutron [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updated VIF entry in instance network info cache for port e91b61d1-ee47-49e8-a302-26b7b0725dff. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1001.936855] env[65758]: DEBUG nova.network.neutron [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1002.039383] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52033e87-bb7a-e506-9282-037dddbb1485, 'name': SearchDatastore_Task, 'duration_secs': 0.022734} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.039737] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.039989] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1002.040241] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.047709] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.064888] env[65758]: WARNING openstack [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.065333] env[65758]: WARNING openstack [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.102841] env[65758]: DEBUG nova.network.neutron [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1002.115862] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c88a4c-5f08-47d9-5814-9fe9b2b053b7, 'name': SearchDatastore_Task, 'duration_secs': 0.029429} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.116262] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.116540] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 37bae4b3-6959-4f44-8600-26a4f859103c/37bae4b3-6959-4f44-8600-26a4f859103c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1002.116853] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.117056] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.117293] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbc86903-e67e-4093-9fa6-dd64fb64713a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.119358] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-166d7841-84da-4ae9-b62d-ed02c39ad9ec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.130243] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1002.130243] env[65758]: value = "task-4660929" [ 1002.130243] env[65758]: _type = "Task" [ 1002.130243] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.135199] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.135437] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1002.136577] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d1a72cc-6a76-4a59-a6bb-a8d5c5b6c5ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.143117] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.147317] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1002.147317] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523d48f3-b57b-3919-b505-c2cfdacce332" [ 1002.147317] env[65758]: _type = "Task" [ 1002.147317] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.158146] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523d48f3-b57b-3919-b505-c2cfdacce332, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.168154] env[65758]: DEBUG nova.compute.manager [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1002.168516] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.169568] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1038c9a8-bdf9-45d2-868b-ca68e0d95115 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.178318] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.178640] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc8c1a79-aef6-4c67-b824-897e3f48bfcc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.186606] env[65758]: DEBUG oslo_vmware.api [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1002.186606] env[65758]: value = "task-4660930" [ 1002.186606] env[65758]: _type = "Task" [ 1002.186606] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.193373] env[65758]: WARNING neutronclient.v2_0.client [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1002.194555] env[65758]: WARNING openstack [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1002.194555] env[65758]: WARNING openstack [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1002.207439] env[65758]: DEBUG oslo_vmware.api [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660930, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.223850] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "9ec1ff52-7fbd-4530-9377-caeff103360b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.224197] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "9ec1ff52-7fbd-4530-9377-caeff103360b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.296932] env[65758]: DEBUG nova.network.neutron [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Updating instance_info_cache with network_info: [{"id": "246d47d4-7e93-44b1-8daa-1bab668be0e5", "address": "fa:16:3e:76:18:5e", "network": {"id": "29782e3a-5a24-4203-9a60-31ddadde5572", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-406665372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b57a558b5f2b410e8d91d07056bf997f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246d47d4-7e", "ovs_interfaceid": "246d47d4-7e93-44b1-8daa-1bab668be0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1002.359385] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660928, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.441207] env[65758]: DEBUG oslo_concurrency.lockutils [req-2923eab4-3900-4606-a82b-15d41f9b7f20 req-074437b8-cae1-46ae-b372-959f0d8bea0e service nova] Releasing lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.641503] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660929, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.660243] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523d48f3-b57b-3919-b505-c2cfdacce332, 'name': SearchDatastore_Task, 'duration_secs': 0.019326} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.661533] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e2d1fb9-3c47-453f-8675-2b19fbc4db2b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.669210] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1002.669210] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522834e8-c825-155e-e8be-4189baffe319" [ 1002.669210] env[65758]: _type = "Task" [ 1002.669210] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.678711] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522834e8-c825-155e-e8be-4189baffe319, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.697228] env[65758]: DEBUG oslo_vmware.api [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660930, 'name': PowerOffVM_Task, 'duration_secs': 0.287742} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.697228] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.697228] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.697228] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-815ff437-45b8-4bb2-98df-300e7c012275 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.715912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.581s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.719100] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.860s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.719403] env[65758]: DEBUG nova.objects.instance [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lazy-loading 'resources' on Instance uuid 6981b99e-8e9f-459a-b356-9ed726c268ed {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.727414] env[65758]: DEBUG nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1002.801042] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Releasing lock "refresh_cache-95509bbe-5aaf-471f-97b3-8a3085797568" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.801270] env[65758]: DEBUG nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Instance network_info: |[{"id": "246d47d4-7e93-44b1-8daa-1bab668be0e5", "address": "fa:16:3e:76:18:5e", "network": {"id": "29782e3a-5a24-4203-9a60-31ddadde5572", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-406665372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b57a558b5f2b410e8d91d07056bf997f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246d47d4-7e", "ovs_interfaceid": "246d47d4-7e93-44b1-8daa-1bab668be0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1002.801674] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.801882] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.802072] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleting the datastore file [datastore1] 1ff48e58-9240-466d-bec4-51394e550c34 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.802478] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:18:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '246d47d4-7e93-44b1-8daa-1bab668be0e5', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1002.810236] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Creating folder: Project (b57a558b5f2b410e8d91d07056bf997f). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1002.810552] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-290b80f0-5d0d-4b90-b065-414e8690830f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.812740] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7360ad3-04a5-4b4e-be73-211016749f1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.823673] env[65758]: DEBUG oslo_vmware.api [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1002.823673] env[65758]: value = "task-4660933" [ 1002.823673] env[65758]: _type = "Task" [ 1002.823673] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.828394] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Created folder: Project (b57a558b5f2b410e8d91d07056bf997f) in parent group-v909763. [ 1002.828641] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Creating folder: Instances. Parent ref: group-v910005. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1002.829487] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-489cb80c-ca16-4e35-8c55-4fb1b8b0271a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.835533] env[65758]: DEBUG oslo_vmware.api [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.840670] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Created folder: Instances in parent group-v910005. [ 1002.840941] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1002.841180] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1002.841410] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb434c71-3fd2-4f89-9350-6e7d6bd31d57 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.866820] env[65758]: DEBUG oslo_vmware.api [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4660928, 'name': PowerOnVM_Task, 'duration_secs': 0.55092} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.868484] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1002.868589] env[65758]: INFO nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1002.868769] env[65758]: DEBUG nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1002.869394] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.869394] env[65758]: value = "task-4660935" [ 1002.869394] env[65758]: _type = "Task" [ 1002.869394] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.869765] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957257d6-c1de-492d-874e-97023de148fd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.881546] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660935, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.979576] env[65758]: DEBUG nova.compute.manager [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Received event network-changed-246d47d4-7e93-44b1-8daa-1bab668be0e5 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1002.979576] env[65758]: DEBUG nova.compute.manager [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Refreshing instance network info cache due to event network-changed-246d47d4-7e93-44b1-8daa-1bab668be0e5. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1002.980302] env[65758]: DEBUG oslo_concurrency.lockutils [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] Acquiring lock "refresh_cache-95509bbe-5aaf-471f-97b3-8a3085797568" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.980610] env[65758]: DEBUG oslo_concurrency.lockutils [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] Acquired lock "refresh_cache-95509bbe-5aaf-471f-97b3-8a3085797568" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.980828] env[65758]: DEBUG nova.network.neutron [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Refreshing network info cache for port 246d47d4-7e93-44b1-8daa-1bab668be0e5 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1003.142839] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.606089} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.143169] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 37bae4b3-6959-4f44-8600-26a4f859103c/37bae4b3-6959-4f44-8600-26a4f859103c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.143396] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.143708] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d842541-43e9-4ba1-883a-ace53c1c786a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.151723] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1003.151723] env[65758]: value = "task-4660936" [ 1003.151723] env[65758]: _type = "Task" [ 1003.151723] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.163343] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660936, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.180729] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522834e8-c825-155e-e8be-4189baffe319, 'name': SearchDatastore_Task, 'duration_secs': 0.028216} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.181057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.182101] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 5fc4f1b8-9024-4155-b56d-56a8d08f0259/5fc4f1b8-9024-4155-b56d-56a8d08f0259.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1003.182412] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40bceae2-db70-4229-8d31-023ec08e769d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.191318] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1003.191318] env[65758]: value = "task-4660937" [ 1003.191318] env[65758]: _type = "Task" [ 1003.191318] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.201748] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660937, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.257555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.294133] env[65758]: INFO nova.scheduler.client.report [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted allocation for migration fbb0ee87-076d-4bf3-b98e-480be784f44a [ 1003.336437] env[65758]: DEBUG oslo_vmware.api [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396256} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.336876] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.337058] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.337340] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.337686] env[65758]: INFO nova.compute.manager [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1003.338066] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1003.338458] env[65758]: DEBUG nova.compute.manager [-] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1003.338604] env[65758]: DEBUG nova.network.neutron [-] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1003.338917] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1003.339525] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1003.339981] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1003.386550] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660935, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.394616] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1003.398154] env[65758]: INFO nova.compute.manager [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Took 23.06 seconds to build instance. [ 1003.485256] env[65758]: WARNING neutronclient.v2_0.client [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1003.485356] env[65758]: WARNING openstack [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1003.485694] env[65758]: WARNING openstack [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1003.575944] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723f3926-0e4d-4994-bc41-f47722e897eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.591139] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882ed827-53eb-4f1f-bc8e-56a2ee75b3e5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.640132] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3266824-9ce8-4a69-9a42-41d2f9989fe5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.650855] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60cdfe4-345d-4768-9ebc-cb1269f0ba17 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.671771] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090374} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.673031] env[65758]: DEBUG nova.compute.provider_tree [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.676634] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.677847] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35229bb5-22ae-41bc-9614-e0ae1184f581 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.701982] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 37bae4b3-6959-4f44-8600-26a4f859103c/37bae4b3-6959-4f44-8600-26a4f859103c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.706429] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa0479ef-7f2a-41a0-891f-d4fb48d7b848 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.723611] env[65758]: DEBUG nova.compute.manager [req-243ee4f6-b7e8-4f50-85e1-72cdf77fc221 req-948fe8ce-ff2f-4b62-89a6-07d45c778218 service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Received event network-vif-deleted-a2f86d86-ad34-41b7-a00d-cd72df0fb614 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1003.723937] env[65758]: INFO nova.compute.manager [req-243ee4f6-b7e8-4f50-85e1-72cdf77fc221 req-948fe8ce-ff2f-4b62-89a6-07d45c778218 service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Neutron deleted interface a2f86d86-ad34-41b7-a00d-cd72df0fb614; detaching it from the instance and deleting it from the info cache [ 1003.724039] env[65758]: DEBUG nova.network.neutron [req-243ee4f6-b7e8-4f50-85e1-72cdf77fc221 req-948fe8ce-ff2f-4b62-89a6-07d45c778218 service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1003.726192] env[65758]: WARNING neutronclient.v2_0.client [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1003.726862] env[65758]: WARNING openstack [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1003.727234] env[65758]: WARNING openstack [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1003.737801] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9da5be60-70d5-4b1a-a284-ddea714b65d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.746138] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660937, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.746138] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1003.746138] env[65758]: value = "task-4660938" [ 1003.746138] env[65758]: _type = "Task" [ 1003.746138] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.753902] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89d3a5e-a549-46d1-9cf9-1ed1b5ae26e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.768748] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660938, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.801338] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ebeb4038-1d8b-4ec2-a05f-6d492a34b28b tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 15.211s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.802609] env[65758]: DEBUG nova.compute.manager [req-243ee4f6-b7e8-4f50-85e1-72cdf77fc221 req-948fe8ce-ff2f-4b62-89a6-07d45c778218 service nova] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Detach interface failed, port_id=a2f86d86-ad34-41b7-a00d-cd72df0fb614, reason: Instance 1ff48e58-9240-466d-bec4-51394e550c34 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1003.837034] env[65758]: DEBUG nova.network.neutron [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Updated VIF entry in instance network info cache for port 246d47d4-7e93-44b1-8daa-1bab668be0e5. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1003.837611] env[65758]: DEBUG nova.network.neutron [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Updating instance_info_cache with network_info: [{"id": "246d47d4-7e93-44b1-8daa-1bab668be0e5", "address": "fa:16:3e:76:18:5e", "network": {"id": "29782e3a-5a24-4203-9a60-31ddadde5572", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-406665372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b57a558b5f2b410e8d91d07056bf997f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246d47d4-7e", "ovs_interfaceid": "246d47d4-7e93-44b1-8daa-1bab668be0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1003.884804] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660935, 'name': CreateVM_Task, 'duration_secs': 0.565826} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.885039] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1003.885705] env[65758]: WARNING neutronclient.v2_0.client [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1003.886189] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.886348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.886745] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1003.886999] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77920653-3064-4cd8-ae6f-22b2ed72a118 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.894085] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1003.894085] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528f4a77-6b7c-a1f0-bb18-a2092e46064a" [ 1003.894085] env[65758]: _type = "Task" [ 1003.894085] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.899907] env[65758]: DEBUG oslo_concurrency.lockutils [None req-bfe35c65-e427-46ef-97ce-9d3474b9c06a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.574s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.904166] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528f4a77-6b7c-a1f0-bb18-a2092e46064a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.145187] env[65758]: DEBUG nova.network.neutron [-] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1004.179180] env[65758]: DEBUG nova.scheduler.client.report [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1004.207260] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.919035} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.207260] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 5fc4f1b8-9024-4155-b56d-56a8d08f0259/5fc4f1b8-9024-4155-b56d-56a8d08f0259.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1004.207260] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.207260] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa223936-a521-4855-87f2-3b3fef17150a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.220895] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1004.220895] env[65758]: value = "task-4660939" [ 1004.220895] env[65758]: _type = "Task" [ 1004.220895] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.231346] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660939, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.257565] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.340663] env[65758]: DEBUG oslo_concurrency.lockutils [req-b58baef3-8db2-4484-a7fe-4e369a727af5 req-e384a9c7-b8b8-445b-b43c-1448923a1959 service nova] Releasing lock "refresh_cache-95509bbe-5aaf-471f-97b3-8a3085797568" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.407304] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528f4a77-6b7c-a1f0-bb18-a2092e46064a, 'name': SearchDatastore_Task, 'duration_secs': 0.053197} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.407664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.407928] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.408155] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.408312] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.408494] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.408794] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-3dfe4207-50c7-4d20-a27e-cd59588d0549 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.419338] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.419622] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.420459] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-692e6ab3-df4a-4241-bd3e-00a1aacf1e5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.427394] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1004.427394] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526a099f-e7cd-6e30-b78b-91e6b17b4f12" [ 1004.427394] env[65758]: _type = "Task" [ 1004.427394] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.437055] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526a099f-e7cd-6e30-b78b-91e6b17b4f12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.648036] env[65758]: INFO nova.compute.manager [-] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Took 1.31 seconds to deallocate network for instance. 
[ 1004.684963] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.690846] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.645s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.691145] env[65758]: DEBUG nova.objects.instance [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'resources' on Instance uuid e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.692691] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "3ff9192b-3956-49f6-afd2-827759826056" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.692928] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.693163] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "3ff9192b-3956-49f6-afd2-827759826056-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.693351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.693521] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1004.695713] env[65758]: INFO nova.compute.manager [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Terminating instance [ 1004.731852] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085596} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.732168] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1004.733245] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eead2f-444c-49a8-9ef4-71948abcea68 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.759736] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 5fc4f1b8-9024-4155-b56d-56a8d08f0259/5fc4f1b8-9024-4155-b56d-56a8d08f0259.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1004.760931] env[65758]: INFO nova.scheduler.client.report [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleted allocations for instance 6981b99e-8e9f-459a-b356-9ed726c268ed [ 1004.765306] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ccdb1bf-31bf-4344-930d-e7e3a6123d86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.789818] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660938, 'name': ReconfigVM_Task, 'duration_secs': 0.613312} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.792435] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 37bae4b3-6959-4f44-8600-26a4f859103c/37bae4b3-6959-4f44-8600-26a4f859103c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.792435] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1004.792435] env[65758]: value = "task-4660940" [ 1004.792435] env[65758]: _type = "Task" [ 1004.792435] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.792435] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3bba071-e57a-4835-acad-3fdd66ca1c59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.807209] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660940, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.808861] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1004.808861] env[65758]: value = "task-4660941" [ 1004.808861] env[65758]: _type = "Task" [ 1004.808861] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.822107] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660941, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.940547] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526a099f-e7cd-6e30-b78b-91e6b17b4f12, 'name': SearchDatastore_Task, 'duration_secs': 0.010771} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.941613] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8767360-f1bc-400b-81f8-3f2ba0dfa701 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.948952] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1004.948952] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e64997-1cec-58c1-89e7-44b46d0efd45" [ 1004.948952] env[65758]: _type = "Task" [ 1004.948952] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.961348] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e64997-1cec-58c1-89e7-44b46d0efd45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.011042] env[65758]: DEBUG nova.compute.manager [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received event network-changed-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1005.011334] env[65758]: DEBUG nova.compute.manager [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Refreshing instance network info cache due to event network-changed-2adc4687-14f6-4742-8afd-a86473befd61. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1005.011598] env[65758]: DEBUG oslo_concurrency.lockutils [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] Acquiring lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.011775] env[65758]: DEBUG oslo_concurrency.lockutils [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] Acquired lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.011945] env[65758]: DEBUG nova.network.neutron [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Refreshing network info cache for port 2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1005.155147] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.200307] env[65758]: DEBUG nova.compute.manager [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1005.200563] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.201609] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd1c9be-bfa9-4e16-9178-3ef8ed31f1d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.210837] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.210837] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-519deb1a-9f97-4930-9b2f-5b79714f4fef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.220725] env[65758]: DEBUG oslo_vmware.api [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1005.220725] env[65758]: value = "task-4660942" [ 1005.220725] env[65758]: _type = "Task" [ 1005.220725] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.233717] env[65758]: DEBUG oslo_vmware.api [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660942, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.274073] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "b50b7e64-6f7f-4abc-a4b1-93408a723298" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.274350] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "b50b7e64-6f7f-4abc-a4b1-93408a723298" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.287719] env[65758]: DEBUG oslo_concurrency.lockutils [None req-50fc183c-2be2-459b-bcf8-f95af1c56653 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "6981b99e-8e9f-459a-b356-9ed726c268ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.174s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.319488] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660940, 'name': ReconfigVM_Task, 'duration_secs': 0.323127} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.324952] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 5fc4f1b8-9024-4155-b56d-56a8d08f0259/5fc4f1b8-9024-4155-b56d-56a8d08f0259.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.326219] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-329f2592-c5b4-44d7-884a-a508dffb1bcd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.332765] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660941, 'name': Rename_Task, 'duration_secs': 0.222363} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.334028] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.334506] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d64626c-dde8-40c7-9f00-87bd88f897d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.342077] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1005.342077] env[65758]: value = "task-4660943" [ 1005.342077] env[65758]: _type = "Task" [ 1005.342077] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.347712] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1005.347712] env[65758]: value = "task-4660944" [ 1005.347712] env[65758]: _type = "Task" [ 1005.347712] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.359245] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660943, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.362738] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.466025] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e64997-1cec-58c1-89e7-44b46d0efd45, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.466025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.466354] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 95509bbe-5aaf-471f-97b3-8a3085797568/95509bbe-5aaf-471f-97b3-8a3085797568.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.466419] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-699d9403-293d-4c28-a197-2d69799cb98a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.479271] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1005.479271] env[65758]: value = "task-4660945" [ 1005.479271] env[65758]: _type = "Task" [ 1005.479271] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.492521] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660945, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.515882] env[65758]: WARNING neutronclient.v2_0.client [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1005.516979] env[65758]: WARNING openstack [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1005.517537] env[65758]: WARNING openstack [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1005.562579] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43168cb-55d6-45ed-86e8-a191c67e1428 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.573978] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161a5c51-3452-4d6b-a6fa-d418487c75ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.616055] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf61043-5cf9-4b6b-a1e4-71e6808f74ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.624894] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67308bcd-c391-4c9e-8bde-e69edf44e6c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.643022] env[65758]: DEBUG nova.compute.provider_tree [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.717306] env[65758]: WARNING neutronclient.v2_0.client [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1005.717876] env[65758]: WARNING openstack [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1005.718317] env[65758]: WARNING openstack [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1005.736929] env[65758]: DEBUG oslo_vmware.api [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660942, 'name': PowerOffVM_Task, 'duration_secs': 0.264704} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.737240] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.737371] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.737653] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7947b573-80c4-4264-9106-55d2d3711b4e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.777915] env[65758]: DEBUG nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1005.860555] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660943, 'name': Rename_Task, 'duration_secs': 0.213956} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.864393] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.864847] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.865019] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.865205] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleting the datastore file [datastore1] 3ff9192b-3956-49f6-afd2-827759826056 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.872060] env[65758]: DEBUG nova.network.neutron [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updated VIF entry in instance network info cache for port 2adc4687-14f6-4742-8afd-a86473befd61. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1005.872540] env[65758]: DEBUG nova.network.neutron [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [{"id": "2adc4687-14f6-4742-8afd-a86473befd61", "address": "fa:16:3e:63:9e:d9", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc4687-14", "ovs_interfaceid": "2adc4687-14f6-4742-8afd-a86473befd61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1005.874074] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09d13131-1a12-48fc-8c37-b66391e4e5eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.876021] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbe40a1c-2add-4aee-ad2b-227f980917e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.878098] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660944, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.889833] env[65758]: DEBUG oslo_vmware.api [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1005.889833] env[65758]: value = "task-4660948" [ 1005.889833] env[65758]: _type = "Task" [ 1005.889833] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.889833] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1005.889833] env[65758]: value = "task-4660947" [ 1005.889833] env[65758]: _type = "Task" [ 1005.889833] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.911305] env[65758]: DEBUG oslo_vmware.api [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660948, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.911787] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660947, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.992043] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660945, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.145997] env[65758]: DEBUG nova.scheduler.client.report [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.305314] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.361902] env[65758]: DEBUG oslo_vmware.api [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4660944, 'name': PowerOnVM_Task, 'duration_secs': 0.631634} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.362364] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1006.362666] env[65758]: INFO nova.compute.manager [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Took 10.83 seconds to spawn the instance on the hypervisor. 
[ 1006.362937] env[65758]: DEBUG nova.compute.manager [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1006.364133] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d40888c-1b23-423e-af42-f19175ca986a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.379642] env[65758]: DEBUG oslo_concurrency.lockutils [req-7f7fc1da-34ee-4a4d-a624-d66634080569 req-4983dcd8-9250-4233-a009-14fb4173efb5 service nova] Releasing lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.408696] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660947, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.409065] env[65758]: DEBUG oslo_vmware.api [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347736} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.409671] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.409671] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1006.409842] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1006.409926] env[65758]: INFO nova.compute.manager [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1006.410588] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1006.410588] env[65758]: DEBUG nova.compute.manager [-] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1006.410588] env[65758]: DEBUG nova.network.neutron [-] [instance: 3ff9192b-3956-49f6-afd2-827759826056] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1006.410892] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1006.411217] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1006.411554] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1006.459139] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1006.496108] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660945, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567148} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.496367] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 95509bbe-5aaf-471f-97b3-8a3085797568/95509bbe-5aaf-471f-97b3-8a3085797568.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.496585] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.496863] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1981ad5d-e152-4df6-b502-14d241117889 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.505352] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1006.505352] env[65758]: value = "task-4660949" [ 1006.505352] env[65758]: _type = "Task" [ 1006.505352] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.526434] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660949, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.652772] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.655398] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.949s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.655627] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.655798] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1006.656245] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.609s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.657709] env[65758]: INFO nova.compute.claims [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.661165] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c57808-12c4-4dd9-a941-19bfc8497be3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.671337] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53dce94-13f8-43a9-921c-18748ffbdd8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.689168] env[65758]: INFO nova.scheduler.client.report [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted allocations for instance e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a [ 1006.692375] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570c1aaa-1822-4a3a-b28f-798e3aafc1e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.706512] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9149676e-52ea-4579-bc5f-25c5bb7e32b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.744764] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177989MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1006.744924] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.889144] env[65758]: INFO nova.compute.manager [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Took 26.31 seconds to build instance. [ 1006.900165] env[65758]: DEBUG oslo_vmware.api [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4660947, 'name': PowerOnVM_Task, 'duration_secs': 0.81517} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.900844] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1006.902088] env[65758]: INFO nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Took 8.74 seconds to spawn the instance on the hypervisor. [ 1006.902088] env[65758]: DEBUG nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1006.902088] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039c91f6-82f4-4242-bbbd-6902e1e14974 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.017988] env[65758]: DEBUG nova.compute.manager [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1007.018272] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078226} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.019186] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586590ec-4253-4b7e-8f3f-92df2bf74237 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.022506] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1007.023801] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e62ed3-5b3c-41fa-b14f-69b67f8af5ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.053221] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 95509bbe-5aaf-471f-97b3-8a3085797568/95509bbe-5aaf-471f-97b3-8a3085797568.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.053781] env[65758]: DEBUG nova.compute.manager [req-3f27c663-3bc4-40b8-995b-c17bb9dfac3c req-c04c36d7-fb21-4e39-90ad-b90ec4c5edee service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Received event network-vif-deleted-1a0cdbf3-b230-4f89-999a-4886f142722c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1007.053985] env[65758]: INFO nova.compute.manager [req-3f27c663-3bc4-40b8-995b-c17bb9dfac3c req-c04c36d7-fb21-4e39-90ad-b90ec4c5edee service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Neutron deleted interface 1a0cdbf3-b230-4f89-999a-4886f142722c; detaching it from the instance and deleting it from the info cache [ 1007.054758] env[65758]: DEBUG nova.network.neutron [req-3f27c663-3bc4-40b8-995b-c17bb9dfac3c req-c04c36d7-fb21-4e39-90ad-b90ec4c5edee service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1007.055660] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f797c98a-b4a1-454b-8799-cc73b6f9287f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.077437] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1007.077437] env[65758]: value = "task-4660950" [ 1007.077437] env[65758]: _type = "Task" [ 1007.077437] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.087319] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660950, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.205233] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1d861202-97f0-4bcd-91a1-f3df9ba4b377 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.503s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.248486] env[65758]: DEBUG nova.network.neutron [-] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1007.339275] env[65758]: DEBUG nova.compute.manager [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Received event network-changed-e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1007.339275] env[65758]: DEBUG nova.compute.manager [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Refreshing instance network info cache due to event network-changed-e953f008-edba-4efb-8764-649f24572836. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1007.339275] env[65758]: DEBUG oslo_concurrency.lockutils [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] Acquiring lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.339484] env[65758]: DEBUG oslo_concurrency.lockutils [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] Acquired lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.339609] env[65758]: DEBUG nova.network.neutron [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Refreshing network info cache for port e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1007.392843] env[65758]: DEBUG oslo_concurrency.lockutils [None req-20693aa8-d03a-4988-adbe-ed42ea6bbe10 tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "37bae4b3-6959-4f44-8600-26a4f859103c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.826s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.424173] env[65758]: INFO nova.compute.manager [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Took 18.21 seconds to build instance. [ 1007.556485] env[65758]: INFO nova.compute.manager [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] instance snapshotting [ 1007.556485] env[65758]: DEBUG nova.objects.instance [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'flavor' on Instance uuid 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.571397] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54969582-1226-4d58-8b4e-c0e57c728ea0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.587117] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9189a7-7d22-4cd1-9746-9788d6b5f677 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.604953] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660950, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.633346] env[65758]: DEBUG nova.compute.manager [req-3f27c663-3bc4-40b8-995b-c17bb9dfac3c req-c04c36d7-fb21-4e39-90ad-b90ec4c5edee service nova] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Detach interface failed, port_id=1a0cdbf3-b230-4f89-999a-4886f142722c, reason: Instance 3ff9192b-3956-49f6-afd2-827759826056 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1007.751327] env[65758]: INFO nova.compute.manager [-] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Took 1.34 seconds to deallocate network for instance. [ 1007.842913] env[65758]: WARNING neutronclient.v2_0.client [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1007.844073] env[65758]: WARNING openstack [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1007.844560] env[65758]: WARNING openstack [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1007.928267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-91b11488-6d71-493b-ac2d-328b93525424 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.728s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.058135] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d776cae-0ca0-44c7-ab7d-3de504fe2880 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.064048] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbd3cf6-1736-47a9-94e2-9d4336f4ff7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.070644] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b07983-487e-4103-b47f-95dd2d5c4576 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.091988] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4f223f-4460-4b5b-8360-7a43d83e522b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.120560] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2f39b0b6-477c-4ee0-a6a6-1ab5ade21c89 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.133442] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660950, 'name': ReconfigVM_Task, 'duration_secs': 0.832904} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.134906] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 95509bbe-5aaf-471f-97b3-8a3085797568/95509bbe-5aaf-471f-97b3-8a3085797568.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.135844] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f473c319-0010-4e5f-ab3e-5f64df35c768 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.143627] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0be8a1-bf5c-4291-a844-dad824fb40f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.152698] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1008.152698] env[65758]: value = "task-4660951" [ 1008.152698] env[65758]: _type = "Task" [ 1008.152698] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.170606] env[65758]: DEBUG nova.compute.provider_tree [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.180260] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660951, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.197316] env[65758]: WARNING neutronclient.v2_0.client [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1008.198035] env[65758]: WARNING openstack [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1008.198427] env[65758]: WARNING openstack [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1008.269234] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.310172] env[65758]: DEBUG nova.network.neutron [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Updated VIF entry in instance network info cache for port e953f008-edba-4efb-8764-649f24572836. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1008.310346] env[65758]: DEBUG nova.network.neutron [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Updating instance_info_cache with network_info: [{"id": "e953f008-edba-4efb-8764-649f24572836", "address": "fa:16:3e:29:3d:28", "network": {"id": "d166daf6-9504-46bc-a105-277a3ed6341e", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1828106829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaa7a78affb743fe9a31cb24f537f30c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape953f008-ed", "ovs_interfaceid": "e953f008-edba-4efb-8764-649f24572836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1008.635672] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Creating Snapshot of the VM instance {{(pid=65758) 
_create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1008.636111] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fa44aeda-bc0c-4cfb-87d3-59824c609918 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.647289] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1008.647289] env[65758]: value = "task-4660952" [ 1008.647289] env[65758]: _type = "Task" [ 1008.647289] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.661533] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660952, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.668331] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660951, 'name': Rename_Task, 'duration_secs': 0.359199} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.668622] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.668963] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1f2e7eb-c2a9-4cec-8723-9cdf0de411c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.674241] env[65758]: DEBUG nova.scheduler.client.report [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1008.679185] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1008.679185] env[65758]: value = "task-4660953" [ 1008.679185] env[65758]: _type = "Task" [ 1008.679185] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.690997] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660953, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.813381] env[65758]: DEBUG oslo_concurrency.lockutils [req-fb805a55-7880-49d6-81d9-7066e736283c req-13fa378f-5d5c-4259-9c07-16311d4e2ca2 service nova] Releasing lock "refresh_cache-37bae4b3-6959-4f44-8600-26a4f859103c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.159179] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660952, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.181422] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.525s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.182103] env[65758]: DEBUG nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1009.185138] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.928s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.186654] env[65758]: INFO nova.compute.claims [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.199608] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660953, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.367368] env[65758]: DEBUG nova.compute.manager [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-changed-e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1009.367676] env[65758]: DEBUG nova.compute.manager [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Refreshing instance network info cache due to event network-changed-e91b61d1-ee47-49e8-a302-26b7b0725dff. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1009.367878] env[65758]: DEBUG oslo_concurrency.lockutils [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] Acquiring lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.368216] env[65758]: DEBUG oslo_concurrency.lockutils [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] Acquired lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.368285] env[65758]: DEBUG nova.network.neutron [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Refreshing network info cache for port e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1009.583023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.583023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.661204] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660952, 'name': CreateSnapshot_Task, 'duration_secs': 0.665404} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.661204] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1009.661325] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c188ca3-7b38-49b2-8a28-b3a3eee20cf6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.698758] env[65758]: DEBUG nova.compute.utils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1009.699977] env[65758]: DEBUG oslo_vmware.api [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660953, 'name': PowerOnVM_Task, 'duration_secs': 0.628002} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.700593] env[65758]: DEBUG nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1009.700777] env[65758]: DEBUG nova.network.neutron [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1009.701109] env[65758]: WARNING neutronclient.v2_0.client [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1009.701497] env[65758]: WARNING neutronclient.v2_0.client [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1009.702093] env[65758]: WARNING openstack [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1009.702442] env[65758]: WARNING openstack [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1009.709406] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.709647] env[65758]: INFO nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Took 9.55 seconds to spawn the instance on the hypervisor. [ 1009.709876] env[65758]: DEBUG nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1009.712420] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7040bd5-4b1c-4ee0-bce1-9cc24221ecf3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.758885] env[65758]: DEBUG nova.policy [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb1d1205496843b1bcb995e2f1b81fc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5566280dda3a4e6fa6821d7ef711c108', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1009.870968] env[65758]: WARNING neutronclient.v2_0.client [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1009.871806] env[65758]: WARNING openstack [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1009.872206] env[65758]: WARNING openstack [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1010.086410] env[65758]: DEBUG nova.compute.manager [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1010.105685] env[65758]: DEBUG nova.network.neutron [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Successfully created port: a6572553-5e5a-4fb9-9384-1cfcd168710b {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1010.161858] env[65758]: WARNING neutronclient.v2_0.client [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1010.164017] env[65758]: WARNING openstack [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1010.164017] env[65758]: WARNING openstack [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1010.186024] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1010.186024] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-46b28e25-ade0-41c1-ab7d-cff92249533c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.196456] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1010.196456] env[65758]: value = "task-4660954" [ 1010.196456] env[65758]: _type = "Task" [ 1010.196456] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.203162] env[65758]: DEBUG nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1010.219238] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660954, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.244331] env[65758]: INFO nova.compute.manager [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Took 20.86 seconds to build instance. [ 1010.319155] env[65758]: DEBUG nova.network.neutron [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updated VIF entry in instance network info cache for port e91b61d1-ee47-49e8-a302-26b7b0725dff. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1010.319155] env[65758]: DEBUG nova.network.neutron [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1010.621087] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.623407] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b34b7e5-3921-4f56-b8b6-505c45d5c844 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.636319] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1419f9e6-3614-4326-b139-1d8219ec746c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.667764] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088575ee-f425-4906-bd25-44d6fb55a17f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.677233] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43d7512-6e01-46b8-a236-e25f7b1d887d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.691546] env[65758]: DEBUG nova.compute.provider_tree [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.708594] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660954, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.747166] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b61159-7e40-4b90-b39f-a3e1b83e1f3a tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "95509bbe-5aaf-471f-97b3-8a3085797568" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.382s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.820450] env[65758]: DEBUG oslo_concurrency.lockutils [req-d417a4ec-9b41-4d35-91e6-bebc414e56d7 req-6351a98f-dcc3-4f6c-944c-02eab8b9e04a service nova] Releasing lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.196079] env[65758]: DEBUG nova.scheduler.client.report [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.212881] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660954, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.216889] env[65758]: DEBUG nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1011.219176] env[65758]: DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "be3de9bd-da98-4c7e-ad7c-933245523695" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.219437] env[65758]: DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "be3de9bd-da98-4c7e-ad7c-933245523695" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.219592] env[65758]: DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "be3de9bd-da98-4c7e-ad7c-933245523695-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.219801] env[65758]: DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "be3de9bd-da98-4c7e-ad7c-933245523695-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.219977] env[65758]: DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "be3de9bd-da98-4c7e-ad7c-933245523695-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.222012] env[65758]: INFO nova.compute.manager [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Terminating instance [ 1011.241875] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1011.242159] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1011.242314] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1011.242814] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1011.242814] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1011.242814] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1011.243010] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1011.243122] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1011.243383] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1011.243550] env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1011.243834] 
env[65758]: DEBUG nova.virt.hardware [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1011.244958] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b567448-5480-4677-bc8a-fb2e4f1bec82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.253908] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92faf6d2-5057-427c-b9ef-6b35095c4d3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.612777] env[65758]: DEBUG nova.compute.manager [req-45536504-53e2-4080-972f-8967f551f737 req-79b73fc5-d1f3-493e-904e-6e0abe4f419b service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Received event network-vif-plugged-a6572553-5e5a-4fb9-9384-1cfcd168710b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1011.613039] env[65758]: DEBUG oslo_concurrency.lockutils [req-45536504-53e2-4080-972f-8967f551f737 req-79b73fc5-d1f3-493e-904e-6e0abe4f419b service nova] Acquiring lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.613268] env[65758]: DEBUG oslo_concurrency.lockutils [req-45536504-53e2-4080-972f-8967f551f737 req-79b73fc5-d1f3-493e-904e-6e0abe4f419b service nova] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.613431] env[65758]: DEBUG oslo_concurrency.lockutils [req-45536504-53e2-4080-972f-8967f551f737 req-79b73fc5-d1f3-493e-904e-6e0abe4f419b service nova] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.613628] env[65758]: DEBUG nova.compute.manager [req-45536504-53e2-4080-972f-8967f551f737 req-79b73fc5-d1f3-493e-904e-6e0abe4f419b service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] No waiting events found dispatching network-vif-plugged-a6572553-5e5a-4fb9-9384-1cfcd168710b {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1011.613813] env[65758]: WARNING nova.compute.manager [req-45536504-53e2-4080-972f-8967f551f737 req-79b73fc5-d1f3-493e-904e-6e0abe4f419b service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Received unexpected event network-vif-plugged-a6572553-5e5a-4fb9-9384-1cfcd168710b for instance with vm_state building and task_state spawning. 
[ 1011.692614] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquiring lock "95509bbe-5aaf-471f-97b3-8a3085797568" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.692906] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "95509bbe-5aaf-471f-97b3-8a3085797568" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.693149] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquiring lock "95509bbe-5aaf-471f-97b3-8a3085797568-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.693335] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "95509bbe-5aaf-471f-97b3-8a3085797568-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.693503] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "95509bbe-5aaf-471f-97b3-8a3085797568-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.696495] env[65758]: INFO nova.compute.manager [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Terminating instance [ 1011.701293] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.702526] env[65758]: DEBUG nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1011.704217] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.549s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.704421] env[65758]: DEBUG nova.objects.instance [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lazy-loading 'resources' on Instance uuid 1ff48e58-9240-466d-bec4-51394e550c34 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.710678] env[65758]: DEBUG nova.network.neutron [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Successfully updated port: a6572553-5e5a-4fb9-9384-1cfcd168710b {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1011.721981] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660954, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.725639] env[65758]: DEBUG nova.compute.manager [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1011.726069] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1011.727086] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ed5208-7579-418f-a97f-9612bcea4396 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.736475] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.737255] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae3848dc-d967-4fac-9ecb-2cbc5f3556a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.746181] env[65758]: DEBUG oslo_vmware.api [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1011.746181] env[65758]: value = "task-4660955" [ 1011.746181] env[65758]: _type = "Task" [ 1011.746181] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.760072] env[65758]: DEBUG oslo_vmware.api [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660955, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.843124] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.843463] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.200276] env[65758]: DEBUG nova.compute.manager [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1012.200599] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.201633] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714e6e6b-f003-4964-9933-92b1bfa40f6d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.220299] env[65758]: DEBUG nova.compute.utils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1012.223132] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.223132] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquired lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.223132] env[65758]: DEBUG nova.network.neutron [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1012.224034] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660954, 'name': CloneVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.224477] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.225057] env[65758]: DEBUG nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1012.225720] env[65758]: DEBUG nova.network.neutron [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1012.225720] env[65758]: WARNING neutronclient.v2_0.client [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1012.225877] env[65758]: WARNING neutronclient.v2_0.client [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1012.226783] env[65758]: WARNING openstack [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1012.227102] env[65758]: WARNING openstack [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1012.234824] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5f6b548-ffae-4bad-a196-e1c187ae15b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.245682] env[65758]: DEBUG oslo_vmware.api [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1012.245682] env[65758]: value = "task-4660956" [ 1012.245682] env[65758]: _type = "Task" [ 1012.245682] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.266976] env[65758]: DEBUG oslo_vmware.api [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660955, 'name': PowerOffVM_Task, 'duration_secs': 0.239637} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.267354] env[65758]: DEBUG oslo_vmware.api [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660956, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.267844] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.267968] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.268316] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-831ae602-d502-4706-bfe3-0b5aad2db581 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.294521] env[65758]: DEBUG nova.policy [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '742a9f6633b54c6f8cd432ac94b59e25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e3a324879d646699f950687546ea861', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1012.344141] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.344398] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.344652] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleting the datastore file [datastore1] be3de9bd-da98-4c7e-ad7c-933245523695 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.344984] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c738a237-7197-44f3-80fe-6d124d75cad5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.348020] env[65758]: DEBUG nova.compute.utils [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1012.357494] env[65758]: DEBUG 
oslo_vmware.api [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1012.357494] env[65758]: value = "task-4660958" [ 1012.357494] env[65758]: _type = "Task" [ 1012.357494] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.373927] env[65758]: DEBUG oslo_vmware.api [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660958, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.561279] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439b1d4f-279c-4e51-9a9c-84633f4e484d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.569630] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c1cc62-eb3e-400b-a89f-d5de84b2b75a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.604102] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764abe1c-d9bc-4624-b21d-b24e9b75eb27 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.613307] env[65758]: DEBUG nova.network.neutron [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Successfully created port: 295a6d0b-82a8-470a-8be9-077f59f98374 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1012.616849] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae271ba9-bb2b-4420-9905-c2d72e8fb66e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.632829] env[65758]: DEBUG nova.compute.provider_tree [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.716963] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660954, 'name': CloneVM_Task, 'duration_secs': 2.116564} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.717233] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Created linked-clone VM from snapshot [ 1012.718016] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30f01dc-2ac9-41ff-9e20-543650e0d56c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.728154] env[65758]: DEBUG nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1012.731867] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Uploading image fcf58575-c665-48d7-add3-26ecbec71675 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1012.739907] env[65758]: WARNING openstack [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1012.740079] env[65758]: WARNING openstack [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1012.759951] env[65758]: DEBUG oslo_vmware.api [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660956, 'name': PowerOffVM_Task, 'duration_secs': 0.274143} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.760453] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.760672] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.760991] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10a4a89f-5901-4bac-b0c3-f639b81679e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.765159] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1012.765159] env[65758]: value = "vm-910009" [ 1012.765159] env[65758]: _type = "VirtualMachine" [ 1012.765159] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1012.765493] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2244485f-64d8-44d5-b95a-ac5581bec0b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.775451] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease: (returnval){ [ 1012.775451] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52df8da3-cf54-2caa-c0f5-e48fcc5cc516" [ 1012.775451] env[65758]: _type = "HttpNfcLease" [ 1012.775451] env[65758]: } obtained for exporting VM: (result){ [ 1012.775451] env[65758]: value = "vm-910009" [ 1012.775451] env[65758]: _type = "VirtualMachine" [ 1012.775451] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1012.775919] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the lease: (returnval){ [ 1012.775919] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52df8da3-cf54-2caa-c0f5-e48fcc5cc516" [ 1012.775919] env[65758]: _type = "HttpNfcLease" [ 1012.775919] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1012.785439] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1012.785439] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52df8da3-cf54-2caa-c0f5-e48fcc5cc516" [ 1012.785439] env[65758]: _type = "HttpNfcLease" [ 1012.785439] env[65758]: } is initializing. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1012.786769] env[65758]: DEBUG nova.network.neutron [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1012.852324] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.853014] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.853233] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.853418] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Deleting the datastore file [datastore1] 95509bbe-5aaf-471f-97b3-8a3085797568 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.853808] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75bd03af-8d22-4ff4-a12b-d6e25f462e93 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.863539] env[65758]: DEBUG oslo_vmware.api [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for the task: (returnval){ [ 1012.863539] env[65758]: value = "task-4660961" [ 1012.863539] env[65758]: _type = "Task" [ 1012.863539] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.870695] env[65758]: DEBUG oslo_vmware.api [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4660958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.449498} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.871432] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.871708] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.871890] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.872109] env[65758]: INFO nova.compute.manager [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1012.872403] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1012.872671] env[65758]: DEBUG nova.compute.manager [-] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1012.872798] env[65758]: DEBUG nova.network.neutron [-] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1012.873077] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1012.873785] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1012.874104] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1012.885259] env[65758]: DEBUG oslo_vmware.api [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660961, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.953021] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1013.139894] env[65758]: DEBUG nova.scheduler.client.report [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.286458] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1013.286458] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52df8da3-cf54-2caa-c0f5-e48fcc5cc516" [ 1013.286458] env[65758]: _type = "HttpNfcLease" [ 1013.286458] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1013.286862] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1013.286862] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52df8da3-cf54-2caa-c0f5-e48fcc5cc516" [ 1013.286862] env[65758]: _type = "HttpNfcLease" [ 1013.286862] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1013.287572] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf3b15f-97c4-406c-9d06-c4d930054544 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.296785] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b5d3b-9582-d7aa-54ec-ca331b1a9e34/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1013.296995] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b5d3b-9582-d7aa-54ec-ca331b1a9e34/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1013.375149] env[65758]: DEBUG oslo_vmware.api [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660961, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.459199] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e0212227-c906-477f-a1db-bbd0fe33bbec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.482024] env[65758]: WARNING neutronclient.v2_0.client [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1013.482798] env[65758]: WARNING openstack [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1013.483291] env[65758]: WARNING openstack [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1013.643310] env[65758]: DEBUG nova.network.neutron [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Updating instance_info_cache with network_info: [{"id": "a6572553-5e5a-4fb9-9384-1cfcd168710b", "address": "fa:16:3e:21:ca:02", "network": {"id": "729a6c0b-369b-4f6f-9ac6-aa6347bc5a19", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1604192611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5566280dda3a4e6fa6821d7ef711c108", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6572553-5e", "ovs_interfaceid": "a6572553-5e5a-4fb9-9384-1cfcd168710b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1013.647154] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=65758) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.649570] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.345s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.651383] env[65758]: INFO nova.compute.claims [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.674888] env[65758]: INFO nova.scheduler.client.report [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted allocations for instance 1ff48e58-9240-466d-bec4-51394e550c34 [ 1013.742193] env[65758]: DEBUG nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1013.774181] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1013.774552] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1013.774800] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1013.775079] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1013.775316] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 
tempest-ImagesTestJSON-2085877570-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1013.775507] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1013.775764] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1013.775995] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1013.776258] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1013.776478] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1013.776682] env[65758]: DEBUG nova.virt.hardware [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1013.778628] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7586369f-1d22-4180-b9ca-a7a09912114f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.789784] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f93f88d-1a59-475b-aa35-820b2b271721 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.815816] env[65758]: DEBUG nova.compute.manager [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Received event network-changed-a6572553-5e5a-4fb9-9384-1cfcd168710b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1013.816265] env[65758]: DEBUG nova.compute.manager [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Refreshing instance network info cache due to event network-changed-a6572553-5e5a-4fb9-9384-1cfcd168710b. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1013.816652] env[65758]: DEBUG oslo_concurrency.lockutils [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] Acquiring lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.877107] env[65758]: DEBUG oslo_vmware.api [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Task: {'id': task-4660961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.611552} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.879197] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1013.879197] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1013.879197] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.879197] env[65758]: INFO nova.compute.manager [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1013.879596] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1013.879596] env[65758]: DEBUG nova.compute.manager [-] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1013.879688] env[65758]: DEBUG nova.network.neutron [-] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1013.880094] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1013.880825] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1013.881248] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1013.924033] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.924480] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.924873] env[65758]: INFO nova.compute.manager [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Attaching volume e29ad381-7d88-46b8-b08e-180dc4b43679 to /dev/sdb [ 1013.948466] env[65758]: DEBUG nova.network.neutron [-] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1013.980499] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c0909e-d09c-414a-a09d-08a3d5a44610 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.987293] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1013.994862] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c185fe87-474c-4556-a041-75237f0abd0c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.013927] env[65758]: DEBUG nova.virt.block_device [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating existing volume attachment record: 7ccb0dab-159e-4e1c-ba9b-04518071b516 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1014.150432] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Releasing lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.150981] env[65758]: DEBUG nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Instance network_info: |[{"id": "a6572553-5e5a-4fb9-9384-1cfcd168710b", "address": "fa:16:3e:21:ca:02", "network": {"id": "729a6c0b-369b-4f6f-9ac6-aa6347bc5a19", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1604192611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5566280dda3a4e6fa6821d7ef711c108", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6572553-5e", "ovs_interfaceid": "a6572553-5e5a-4fb9-9384-1cfcd168710b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1014.151400] env[65758]: DEBUG oslo_concurrency.lockutils [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] Acquired lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.151593] env[65758]: DEBUG nova.network.neutron [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Refreshing network info cache for port a6572553-5e5a-4fb9-9384-1cfcd168710b {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1014.154356] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: 
fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:ca:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6572553-5e5a-4fb9-9384-1cfcd168710b', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.165649] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Creating folder: Project (5566280dda3a4e6fa6821d7ef711c108). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1014.169325] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18d7b0cf-bfb9-4bd2-845b-c7439cc72e06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.186221] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54a147d5-5386-4824-a1fc-4e4f380e5bab tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "1ff48e58-9240-466d-bec4-51394e550c34" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.528s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.191690] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Created folder: Project (5566280dda3a4e6fa6821d7ef711c108) in parent group-v909763. [ 1014.191960] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Creating folder: Instances. Parent ref: group-v910010. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1014.192820] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7d40261-33d7-4159-ab74-7963f92734e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.208447] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Created folder: Instances in parent group-v910010. [ 1014.208447] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1014.208447] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.208447] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcc88ad3-82ce-42ea-a96c-6ac236b5554c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.228051] env[65758]: DEBUG nova.network.neutron [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Successfully updated port: 295a6d0b-82a8-470a-8be9-077f59f98374 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1014.235803] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.235803] env[65758]: value = "task-4660965" [ 1014.235803] env[65758]: _type = "Task" [ 1014.235803] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.247910] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660965, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.352739] env[65758]: DEBUG nova.compute.manager [req-f98776ff-44d7-47e6-bddd-f1d111bb6138 req-ab13febf-743c-4cc2-8927-1936e8593bbb service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Received event network-vif-plugged-295a6d0b-82a8-470a-8be9-077f59f98374 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1014.355384] env[65758]: DEBUG oslo_concurrency.lockutils [req-f98776ff-44d7-47e6-bddd-f1d111bb6138 req-ab13febf-743c-4cc2-8927-1936e8593bbb service nova] Acquiring lock "9ec1ff52-7fbd-4530-9377-caeff103360b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.355778] env[65758]: DEBUG oslo_concurrency.lockutils [req-f98776ff-44d7-47e6-bddd-f1d111bb6138 req-ab13febf-743c-4cc2-8927-1936e8593bbb service nova] Lock "9ec1ff52-7fbd-4530-9377-caeff103360b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.355977] env[65758]: DEBUG oslo_concurrency.lockutils [req-f98776ff-44d7-47e6-bddd-f1d111bb6138 req-ab13febf-743c-4cc2-8927-1936e8593bbb service nova] Lock "9ec1ff52-7fbd-4530-9377-caeff103360b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.356277] env[65758]: DEBUG nova.compute.manager [req-f98776ff-44d7-47e6-bddd-f1d111bb6138 req-ab13febf-743c-4cc2-8927-1936e8593bbb service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] No waiting events found dispatching network-vif-plugged-295a6d0b-82a8-470a-8be9-077f59f98374 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1014.358510] env[65758]: WARNING nova.compute.manager 
[req-f98776ff-44d7-47e6-bddd-f1d111bb6138 req-ab13febf-743c-4cc2-8927-1936e8593bbb service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Received unexpected event network-vif-plugged-295a6d0b-82a8-470a-8be9-077f59f98374 for instance with vm_state building and task_state spawning. [ 1014.454138] env[65758]: INFO nova.compute.manager [-] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Took 1.58 seconds to deallocate network for instance. [ 1014.668467] env[65758]: WARNING neutronclient.v2_0.client [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1014.669121] env[65758]: WARNING openstack [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1014.669630] env[65758]: WARNING openstack [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1014.730923] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "refresh_cache-9ec1ff52-7fbd-4530-9377-caeff103360b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.731216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "refresh_cache-9ec1ff52-7fbd-4530-9377-caeff103360b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.731500] env[65758]: DEBUG nova.network.neutron [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1014.751130] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660965, 'name': CreateVM_Task, 'duration_secs': 0.38972} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.751331] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1014.752164] env[65758]: WARNING neutronclient.v2_0.client [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1014.753214] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.753214] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.753214] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1014.753513] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6e53936-850a-4f6e-a17a-582d44429d49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.763108] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1014.763108] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526d82ac-e3c4-3980-f7de-e63fb2540281" [ 1014.763108] env[65758]: _type = "Task" [ 1014.763108] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.777269] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526d82ac-e3c4-3980-f7de-e63fb2540281, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.794326] env[65758]: DEBUG nova.network.neutron [-] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1014.913639] env[65758]: WARNING neutronclient.v2_0.client [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1014.914479] env[65758]: WARNING openstack [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1014.914931] env[65758]: WARNING openstack [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1014.961394] env[65758]: DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.030505] env[65758]: DEBUG nova.network.neutron [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Updated VIF entry in instance network info cache for port a6572553-5e5a-4fb9-9384-1cfcd168710b. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1015.030970] env[65758]: DEBUG nova.network.neutron [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Updating instance_info_cache with network_info: [{"id": "a6572553-5e5a-4fb9-9384-1cfcd168710b", "address": "fa:16:3e:21:ca:02", "network": {"id": "729a6c0b-369b-4f6f-9ac6-aa6347bc5a19", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1604192611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5566280dda3a4e6fa6821d7ef711c108", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6572553-5e", "ovs_interfaceid": "a6572553-5e5a-4fb9-9384-1cfcd168710b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1015.066512] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.067570] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.074504] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d2596d-b3bf-4603-9d68-a9fd8804b7b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.083993] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c65172-b9f5-4838-9c8c-f7cca7fdba11 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.122898] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d02c40-f7e5-43bc-bd7c-599f95b31407 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.132570] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9623bc04-5587-4fe3-8c74-cff8ca86ec78 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.150998] env[65758]: DEBUG nova.compute.provider_tree [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.237423] env[65758]: WARNING openstack [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1015.237859] env[65758]: WARNING openstack [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1015.274243] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526d82ac-e3c4-3980-f7de-e63fb2540281, 'name': SearchDatastore_Task, 'duration_secs': 0.015252} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.274777] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.275140] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.275418] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.275577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.275736] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.276889] env[65758]: DEBUG nova.network.neutron [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1015.279332] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0914a3a-df91-465d-849f-25e16b9acf83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.295806] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.296283] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.297396] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecbffbb8-9ce3-4516-b926-3151ec063c99 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.303958] env[65758]: INFO nova.compute.manager [-] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Took 1.42 seconds to deallocate network for instance. [ 1015.310830] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1015.310830] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52792276-c1ab-82dd-83f9-65e6755d93e3" [ 1015.310830] env[65758]: _type = "Task" [ 1015.310830] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.324945] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52792276-c1ab-82dd-83f9-65e6755d93e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.374038] env[65758]: WARNING neutronclient.v2_0.client [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1015.375668] env[65758]: WARNING openstack [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1015.376241] env[65758]: WARNING openstack [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1015.473029] env[65758]: DEBUG nova.network.neutron [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Updating instance_info_cache with network_info: [{"id": "295a6d0b-82a8-470a-8be9-077f59f98374", "address": "fa:16:3e:a9:dd:fc", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap295a6d0b-82", "ovs_interfaceid": "295a6d0b-82a8-470a-8be9-077f59f98374", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1015.537508] env[65758]: DEBUG oslo_concurrency.lockutils [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] Releasing lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.537822] env[65758]: DEBUG nova.compute.manager [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Received event network-vif-deleted-30972d97-c096-41a5-b3bf-289b54c95d25 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1015.537998] env[65758]: INFO nova.compute.manager [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Neutron deleted interface 30972d97-c096-41a5-b3bf-289b54c95d25; detaching it from the instance and deleting it from the info cache [ 1015.538194] env[65758]: DEBUG nova.network.neutron 
[req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1015.570675] env[65758]: INFO nova.compute.manager [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Detaching volume 51ed0fd6-0a9d-417e-be08-c8c05d6bcc05 [ 1015.606519] env[65758]: INFO nova.virt.block_device [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Attempting to driver detach volume 51ed0fd6-0a9d-417e-be08-c8c05d6bcc05 from mountpoint /dev/sdb [ 1015.606800] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Volume detach. Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1015.607088] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1015.608070] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831112dc-04a6-4688-b399-c38760747936 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.633728] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d381b406-947a-4ea4-8bfd-acc82b625beb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.641977] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b782ea8e-479e-4cef-9ff4-f185921de5d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.664123] env[65758]: DEBUG nova.scheduler.client.report [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1015.668519] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb77387-d134-4c7d-83d0-7f8d6663785c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.686905] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The volume has not been displaced from its original location: [datastore2] volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05/volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05.vmdk. No consolidation needed. {{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1015.692971] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1015.693993] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59f4a62a-0a4a-4acb-9bd9-382d7e2a1888 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.715027] env[65758]: DEBUG oslo_vmware.api [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1015.715027] env[65758]: value = "task-4660968" [ 1015.715027] env[65758]: _type = "Task" [ 1015.715027] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.724674] env[65758]: DEBUG oslo_vmware.api [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660968, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.822617] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.830697] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52792276-c1ab-82dd-83f9-65e6755d93e3, 'name': SearchDatastore_Task, 'duration_secs': 0.020884} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.831925] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12ee6911-2e91-480c-9c25-3653324550d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.839260] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1015.839260] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5276a5aa-cdea-0ff8-6a85-d2907949a6e1" [ 1015.839260] env[65758]: _type = "Task" [ 1015.839260] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.850253] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5276a5aa-cdea-0ff8-6a85-d2907949a6e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.852472] env[65758]: DEBUG nova.compute.manager [req-82cf4fe9-1ba0-4dce-846b-ca5adefcff85 req-8feee098-57df-46f6-a40f-7c6a98f0f846 service nova] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Received event network-vif-deleted-246d47d4-7e93-44b1-8daa-1bab668be0e5 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1015.976048] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "refresh_cache-9ec1ff52-7fbd-4530-9377-caeff103360b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.976492] env[65758]: DEBUG nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Instance network_info: |[{"id": "295a6d0b-82a8-470a-8be9-077f59f98374", "address": "fa:16:3e:a9:dd:fc", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap295a6d0b-82", "ovs_interfaceid": "295a6d0b-82a8-470a-8be9-077f59f98374", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2035}} [ 1015.977027] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:dd:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '295a6d0b-82a8-470a-8be9-077f59f98374', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1015.987051] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1015.987516] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1015.987878] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-995b3f93-f036-49e7-83f6-20cf717c13e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.010082] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.010082] env[65758]: value = "task-4660969" [ 1016.010082] env[65758]: _type = "Task" [ 1016.010082] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.022234] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660969, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.041649] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3caebd2-43d0-4562-a309-5cd4da31b554 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.053035] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6890b969-596e-488c-89bf-6acdd72a6bc4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.095368] env[65758]: DEBUG nova.compute.manager [req-5627a725-7fd3-4b00-b967-bc8efe610f03 req-b4923902-b54d-40ee-b4dc-de30af98aa5d service nova] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Detach interface failed, port_id=30972d97-c096-41a5-b3bf-289b54c95d25, reason: Instance be3de9bd-da98-4c7e-ad7c-933245523695 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1016.173553] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.524s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.174262] env[65758]: DEBUG nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1016.177122] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.432s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.226904] env[65758]: DEBUG oslo_vmware.api [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660968, 'name': ReconfigVM_Task, 'duration_secs': 0.28274} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.227550] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1016.232419] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ab494ae-d88a-4685-9bd9-d27b3de8b2ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.252877] env[65758]: DEBUG oslo_vmware.api [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1016.252877] env[65758]: value = "task-4660970" [ 1016.252877] env[65758]: _type = "Task" [ 1016.252877] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.266471] env[65758]: DEBUG oslo_vmware.api [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660970, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.352908] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5276a5aa-cdea-0ff8-6a85-d2907949a6e1, 'name': SearchDatastore_Task, 'duration_secs': 0.020714} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.353325] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.353668] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] fe6f2a15-f42a-4f63-8dfa-175adadf5c02/fe6f2a15-f42a-4f63-8dfa-175adadf5c02.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1016.353963] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c19a90a-c3a3-47d4-af38-b39354560f5a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.362183] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1016.362183] env[65758]: value = "task-4660971" [ 1016.362183] env[65758]: _type = "Task" [ 1016.362183] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.373863] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660971, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.394226] env[65758]: DEBUG nova.compute.manager [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Received event network-changed-295a6d0b-82a8-470a-8be9-077f59f98374 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1016.394226] env[65758]: DEBUG nova.compute.manager [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Refreshing instance network info cache due to event network-changed-295a6d0b-82a8-470a-8be9-077f59f98374. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1016.394226] env[65758]: DEBUG oslo_concurrency.lockutils [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] Acquiring lock "refresh_cache-9ec1ff52-7fbd-4530-9377-caeff103360b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.394565] env[65758]: DEBUG oslo_concurrency.lockutils [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] Acquired lock "refresh_cache-9ec1ff52-7fbd-4530-9377-caeff103360b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.394565] env[65758]: DEBUG nova.network.neutron [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Refreshing network info cache for port 295a6d0b-82a8-470a-8be9-077f59f98374 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1016.521573] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660969, 'name': CreateVM_Task, 'duration_secs': 0.38312} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.521734] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1016.522296] env[65758]: WARNING neutronclient.v2_0.client [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1016.522794] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.522923] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.523256] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1016.523524] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56d15189-8e50-4a30-8ac6-64d2dd3741fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.529609] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1016.529609] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52309073-f108-901a-3f2c-95c690aeebad" [ 1016.529609] env[65758]: _type = "Task" [ 1016.529609] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.539413] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52309073-f108-901a-3f2c-95c690aeebad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.681468] env[65758]: DEBUG nova.compute.utils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1016.690880] env[65758]: DEBUG nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1016.691818] env[65758]: DEBUG nova.network.neutron [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1016.691818] env[65758]: WARNING neutronclient.v2_0.client [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1016.691818] env[65758]: WARNING neutronclient.v2_0.client [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1016.692606] env[65758]: WARNING openstack [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1016.692830] env[65758]: WARNING openstack [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1016.727095] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 37aadd44-79e8-4479-862f-265549c9d802 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.727252] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ec1e2845-e73a-40ff-9b6c-1d8281859fba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.727434] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 974d06c1-2704-4a78-bbd7-f54335c4288e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.727508] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e6159a35-f073-4931-b0b0-832a88680356 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.727586] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728125] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 7f5911fb-785e-444c-9408-c6884e06c5d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728125] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance df46c28d-7cbd-490e-8db2-9730e4d9f953 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728125] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance be3de9bd-da98-4c7e-ad7c-933245523695 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1016.728125] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 63b744d2-541a-42e3-9717-b06a4459fd50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728364] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 76ec31e6-65c2-4290-9ec0-b274be95baa4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728364] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 3ff9192b-3956-49f6-afd2-827759826056 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1016.728455] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 0ce11868-fee2-40d3-9433-7bc398a1f756 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728494] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance afc1eb16-c275-4b3b-a7fe-9938d2241e24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728621] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 37bae4b3-6959-4f44-8600-26a4f859103c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728667] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 5fc4f1b8-9024-4155-b56d-56a8d08f0259 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.728892] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 95509bbe-5aaf-471f-97b3-8a3085797568 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1016.729021] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance fe6f2a15-f42a-4f63-8dfa-175adadf5c02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.729126] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 9ec1ff52-7fbd-4530-9377-caeff103360b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.729230] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance b50b7e64-6f7f-4abc-a4b1-93408a723298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1016.757740] env[65758]: DEBUG nova.policy [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b6e413458a84a9b8f2b6dcd0061fc33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd550f85853f447bb91a89b6bc6c5720', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1016.771359] env[65758]: DEBUG oslo_vmware.api [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660970, 'name': ReconfigVM_Task, 'duration_secs': 0.189074} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.771775] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909975', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'name': 'volume-51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df46c28d-7cbd-490e-8db2-9730e4d9f953', 'attached_at': '', 'detached_at': '', 'volume_id': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05', 'serial': '51ed0fd6-0a9d-417e-be08-c8c05d6bcc05'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1016.877807] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660971, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.898260] env[65758]: WARNING neutronclient.v2_0.client [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1016.898711] env[65758]: WARNING openstack [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1016.899198] env[65758]: WARNING openstack [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1017.045407] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52309073-f108-901a-3f2c-95c690aeebad, 'name': SearchDatastore_Task, 'duration_secs': 0.026191} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.046282] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.046282] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.046500] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.046662] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.046877] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.047171] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94dd1e2f-215b-4851-a99b-e416ec31ec64 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.057448] env[65758]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.057662] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.058466] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0605e3af-3a02-4890-9087-d93569e0b06c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.065260] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1017.065260] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52056e06-c9bf-0ff8-0c39-b55c6eb0482c" [ 1017.065260] env[65758]: _type = "Task" [ 1017.065260] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.081833] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52056e06-c9bf-0ff8-0c39-b55c6eb0482c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.142422] env[65758]: DEBUG nova.network.neutron [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Successfully created port: 4f1651f5-5511-4231-b401-c0eb4cb9f9eb {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1017.174079] env[65758]: WARNING neutronclient.v2_0.client [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1017.174891] env[65758]: WARNING openstack [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1017.175277] env[65758]: WARNING openstack [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1017.192350] env[65758]: DEBUG nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1017.232670] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 89167b37-4c21-4678-a0f0-5a4ce932c4d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1797}} [ 1017.232983] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1017.233234] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=100GB used_disk=15GB total_vcpus=48 used_vcpus=16 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '16', 'num_vm_active': '12', 'num_task_None': '13', 'num_os_type_None': '16', 'num_proj_e114eef3998848699a9a086fee86db29': '2', 'io_workload': '3', 'num_proj_693b129cd84f4eee9971e7221e92c3e0': '3', 'num_vm_rescued': '1', 'num_proj_c4c2ab2b80c04c38bfb4c7cafac87fe6': '1', 'num_proj_16188c7bd36d4b0eaffdc980b71ac727': '1', 'num_proj_8be788d761114dfca7244f953b571c7d': '1', 'num_task_image_uploading': '1', 'num_proj_4095654557a34bb0907071aedb3bb678': '1', 'num_proj_3efa562362e94a48851ef7efa8c35123': '1', 'num_proj_e2440f1694fe4b87a9827f6653ff2e4c': '1', 'num_proj_eaa7a78affb743fe9a31cb24f537f30c': '1', 'num_proj_64ffccae76ed401582dd915ae5f87922': '1', 'num_vm_building': '3', 'num_task_spawning': '2', 'num_proj_5566280dda3a4e6fa6821d7ef711c108': '1', 'num_proj_3e3a324879d646699f950687546ea861': '1', 'num_proj_fd550f85853f447bb91a89b6bc6c5720': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1017.325660] env[65758]: DEBUG nova.objects.instance [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'flavor' on Instance uuid 
df46c28d-7cbd-490e-8db2-9730e4d9f953 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.375110] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660971, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695071} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.380226] env[65758]: DEBUG nova.network.neutron [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Updated VIF entry in instance network info cache for port 295a6d0b-82a8-470a-8be9-077f59f98374. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1017.380606] env[65758]: DEBUG nova.network.neutron [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Updating instance_info_cache with network_info: [{"id": "295a6d0b-82a8-470a-8be9-077f59f98374", "address": "fa:16:3e:a9:dd:fc", "network": {"id": "74b8c6f4-c84c-4024-ab94-4a479f59ce4f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-661938722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e3a324879d646699f950687546ea861", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap295a6d0b-82", "ovs_interfaceid": "295a6d0b-82a8-470a-8be9-077f59f98374", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1017.382860] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] fe6f2a15-f42a-4f63-8dfa-175adadf5c02/fe6f2a15-f42a-4f63-8dfa-175adadf5c02.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1017.386024] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.386024] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c15d5f4-101e-49ef-9403-de2f88d766c9 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.393805] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1017.393805] env[65758]: value = "task-4660973" [ 1017.393805] env[65758]: _type = "Task" [ 1017.393805] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.410907] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660973, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.568754] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0f272b-629a-46b9-a04e-210132fef49f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.583831] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52056e06-c9bf-0ff8-0c39-b55c6eb0482c, 'name': SearchDatastore_Task, 'duration_secs': 0.016} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.585439] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26eea86-60b4-45ce-9874-ed3a302be8ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.588909] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffb09903-ffe4-4c43-afb7-c60c9fb3b592 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.596654] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1017.596654] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524f72d0-af5d-e5bc-c3d6-6c5f9517a625" [ 1017.596654] env[65758]: _type = "Task" [ 1017.596654] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.631557] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad3e6e5-5345-4a4a-80a7-6f38217b97e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.651029] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8241482-a999-47bb-8dc0-ebc08afcb046 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.656275] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524f72d0-af5d-e5bc-c3d6-6c5f9517a625, 'name': SearchDatastore_Task, 'duration_secs': 0.013906} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.657211] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.657475] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 9ec1ff52-7fbd-4530-9377-caeff103360b/9ec1ff52-7fbd-4530-9377-caeff103360b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1017.658200] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e45a0141-40bb-4d0d-a982-ae2c1c316e24 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.670888] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.676414] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1017.676414] env[65758]: value = "task-4660974" [ 1017.676414] env[65758]: _type = "Task" [ 1017.676414] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.686900] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660974, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.885263] env[65758]: DEBUG oslo_concurrency.lockutils [req-38932766-a293-4c24-a459-89810ed84c94 req-bf44dc57-7b14-4246-a6c5-f6628a5a04c8 service nova] Releasing lock "refresh_cache-9ec1ff52-7fbd-4530-9377-caeff103360b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.911298] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660973, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074155} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.912101] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1017.912981] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e073a5-f5fd-4299-89b8-4bb1b5ab0540 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.937462] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] fe6f2a15-f42a-4f63-8dfa-175adadf5c02/fe6f2a15-f42a-4f63-8dfa-175adadf5c02.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.937852] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ce968c2-b3e2-4015-9b4f-f3f0eeee4665 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.959714] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1017.959714] env[65758]: value = "task-4660975" [ 1017.959714] env[65758]: _type = "Task" [ 1017.959714] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.973787] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660975, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.144683] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.144683] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.145210] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "7f5911fb-785e-444c-9408-c6884e06c5d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.145269] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.145574] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.154043] env[65758]: INFO nova.compute.manager [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Terminating instance [ 1018.174729] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.199934] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660974, 'name': CopyVirtualDisk_Task} 
progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.202727] env[65758]: DEBUG nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1018.267406] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1018.267912] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1018.268360] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1018.268676] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1018.268945] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1018.269149] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1018.269390] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.269583] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1018.269776] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1018.270129] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1018.270406] env[65758]: DEBUG nova.virt.hardware [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1018.271824] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720784d0-1cc6-435f-9ee6-1489c7ad9187 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.284049] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03465eb6-f1f7-46cf-b7af-cc716241f15d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.335597] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5a692a2-af7f-443f-bde1-3da43b1e108f tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.268s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.471746] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660975, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.661287] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.663196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.663196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "df46c28d-7cbd-490e-8db2-9730e4d9f953-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.663196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.663196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.668032] env[65758]: INFO nova.compute.manager [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Terminating instance [ 1018.669589] env[65758]: DEBUG nova.compute.manager [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1018.669811] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1018.675505] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4268975b-6d89-40f4-bebb-f47aff7af25c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.680499] env[65758]: DEBUG nova.compute.manager [req-f4f79f7c-9f65-4b2d-a863-f44d8ab92ed3 req-090d3adb-49fa-414d-9c70-5087fb8f1b38 service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Received event network-vif-plugged-4f1651f5-5511-4231-b401-c0eb4cb9f9eb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1018.680729] env[65758]: DEBUG oslo_concurrency.lockutils [req-f4f79f7c-9f65-4b2d-a863-f44d8ab92ed3 req-090d3adb-49fa-414d-9c70-5087fb8f1b38 service nova] Acquiring lock "b50b7e64-6f7f-4abc-a4b1-93408a723298-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.681045] env[65758]: DEBUG oslo_concurrency.lockutils [req-f4f79f7c-9f65-4b2d-a863-f44d8ab92ed3 req-090d3adb-49fa-414d-9c70-5087fb8f1b38 service nova] Lock "b50b7e64-6f7f-4abc-a4b1-93408a723298-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.681106] env[65758]: DEBUG oslo_concurrency.lockutils [req-f4f79f7c-9f65-4b2d-a863-f44d8ab92ed3 req-090d3adb-49fa-414d-9c70-5087fb8f1b38 service nova] Lock "b50b7e64-6f7f-4abc-a4b1-93408a723298-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.682683] env[65758]: DEBUG nova.compute.manager [req-f4f79f7c-9f65-4b2d-a863-f44d8ab92ed3 req-090d3adb-49fa-414d-9c70-5087fb8f1b38 service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] No waiting events found dispatching network-vif-plugged-4f1651f5-5511-4231-b401-c0eb4cb9f9eb {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1018.682683] env[65758]: WARNING nova.compute.manager [req-f4f79f7c-9f65-4b2d-a863-f44d8ab92ed3 req-090d3adb-49fa-414d-9c70-5087fb8f1b38 service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Received unexpected event network-vif-plugged-4f1651f5-5511-4231-b401-c0eb4cb9f9eb for instance with vm_state building and task_state spawning. 
[ 1018.685857] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1018.685857] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.507s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.693508] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.424s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.694338] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.699019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.076s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.699019] env[65758]: INFO nova.compute.claims [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1018.707656] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.709661] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93dd39df-a7d9-4300-b073-4d57e3ef5772 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.715765] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621828} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.716027] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 9ec1ff52-7fbd-4530-9377-caeff103360b/9ec1ff52-7fbd-4530-9377-caeff103360b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1018.716246] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1018.716567] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa74dda0-9430-43a5-9ade-06e2846d6183 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.722164] env[65758]: DEBUG oslo_vmware.api [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1018.722164] env[65758]: value = "task-4660976" [ 1018.722164] env[65758]: _type = "Task" [ 1018.722164] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.729248] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1018.729248] env[65758]: value = "task-4660977" [ 1018.729248] env[65758]: _type = "Task" [ 1018.729248] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.741678] env[65758]: DEBUG oslo_vmware.api [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660976, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.742872] env[65758]: INFO nova.scheduler.client.report [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted allocations for instance 3ff9192b-3956-49f6-afd2-827759826056 [ 1018.751024] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660977, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.822476] env[65758]: DEBUG nova.network.neutron [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Successfully updated port: 4f1651f5-5511-4231-b401-c0eb4cb9f9eb {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1018.973054] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660975, 'name': ReconfigVM_Task, 'duration_secs': 0.520844} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.973345] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Reconfigured VM instance instance-0000005b to attach disk [datastore2] fe6f2a15-f42a-4f63-8dfa-175adadf5c02/fe6f2a15-f42a-4f63-8dfa-175adadf5c02.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.974429] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-623ac0f9-619e-4558-8df4-bf5df4026cbd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.982491] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1018.982491] env[65758]: value = "task-4660978" [ 1018.982491] env[65758]: _type = "Task" [ 1018.982491] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.995792] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660978, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.093560] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1019.093845] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910014', 'volume_id': 'e29ad381-7d88-46b8-b08e-180dc4b43679', 'name': 'volume-e29ad381-7d88-46b8-b08e-180dc4b43679', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49', 'attached_at': '', 'detached_at': '', 'volume_id': 'e29ad381-7d88-46b8-b08e-180dc4b43679', 'serial': 'e29ad381-7d88-46b8-b08e-180dc4b43679'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1019.094784] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6296bd46-364c-46c0-a54c-038899441c62 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.114187] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f568fed-b818-4234-9fd0-87f6a30d9a1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.141527] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] volume-e29ad381-7d88-46b8-b08e-180dc4b43679/volume-e29ad381-7d88-46b8-b08e-180dc4b43679.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.141658] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-805912e7-d4a7-4eb5-8b80-54792ecd9aab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.163606] env[65758]: DEBUG oslo_vmware.api [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1019.163606] env[65758]: value = "task-4660979" [ 1019.163606] env[65758]: _type = "Task" [ 1019.163606] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.172811] env[65758]: DEBUG oslo_vmware.api [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660979, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.185034] env[65758]: DEBUG nova.compute.manager [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1019.185271] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1019.186214] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411229b8-d3ed-43a3-8548-63f0c7fa5de4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.197400] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.198332] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a52ce09-a64e-46e1-a156-89d1d48adcb5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.206198] env[65758]: DEBUG oslo_vmware.api [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1019.206198] env[65758]: value = "task-4660980" [ 1019.206198] env[65758]: _type = "Task" [ 1019.206198] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.221140] env[65758]: DEBUG oslo_vmware.api [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660980, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.238746] env[65758]: DEBUG oslo_vmware.api [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660976, 'name': PowerOffVM_Task, 'duration_secs': 0.22051} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.239041] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1019.239227] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1019.239927] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-330056bb-92b7-461f-9a5c-0549915f43a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.245767] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660977, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162009} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.246056] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.246881] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac8849f-ac0d-423f-abdb-4ad13c3ce542 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.275256] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 9ec1ff52-7fbd-4530-9377-caeff103360b/9ec1ff52-7fbd-4530-9377-caeff103360b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.276025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5dbf84bf-ea99-4f45-b0b0-d5f7ce906228 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "3ff9192b-3956-49f6-afd2-827759826056" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.583s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.278039] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7101483-32f5-47ea-afde-95011e83d5b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.299859] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 
1019.299859] env[65758]: value = "task-4660982" [ 1019.299859] env[65758]: _type = "Task" [ 1019.299859] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.310840] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660982, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.321514] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1019.321666] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1019.321880] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Deleting the datastore file [datastore2] 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1019.322206] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d22c04b-cb87-4dfc-8ff7-deb0daf26964 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.325680] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "refresh_cache-b50b7e64-6f7f-4abc-a4b1-93408a723298" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.325883] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "refresh_cache-b50b7e64-6f7f-4abc-a4b1-93408a723298" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.326111] env[65758]: DEBUG nova.network.neutron [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1019.332723] env[65758]: DEBUG oslo_vmware.api [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1019.332723] env[65758]: value = "task-4660983" [ 1019.332723] env[65758]: _type = "Task" [ 1019.332723] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.346749] env[65758]: DEBUG oslo_vmware.api [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660983, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.494188] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660978, 'name': Rename_Task, 'duration_secs': 0.163469} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.494621] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1019.494789] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37bb2970-3faa-4de2-b4ad-cfd40b7ede61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.503957] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1019.503957] env[65758]: value = "task-4660984" [ 1019.503957] env[65758]: _type = "Task" [ 1019.503957] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.515228] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660984, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.676883] env[65758]: DEBUG oslo_vmware.api [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660979, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.724245] env[65758]: DEBUG oslo_vmware.api [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660980, 'name': PowerOffVM_Task, 'duration_secs': 0.231442} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.724634] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1019.724873] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1019.725228] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03b45a58-9038-41eb-a071-477c6ab390be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.816761] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660982, 'name': ReconfigVM_Task, 'duration_secs': 0.342505} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.818599] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 9ec1ff52-7fbd-4530-9377-caeff103360b/9ec1ff52-7fbd-4530-9377-caeff103360b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1019.821817] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63bd327b-7f98-4270-9f47-70b6216041af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.824112] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1019.824398] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1019.824624] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleting the datastore file [datastore2] df46c28d-7cbd-490e-8db2-9730e4d9f953 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1019.827664] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37e6f069-c755-4953-be53-ce30c694e035 {{(pid=65758) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.830665] env[65758]: WARNING openstack [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1019.831060] env[65758]: WARNING openstack [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1019.843226] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1019.843226] env[65758]: value = "task-4660986" [ 1019.843226] env[65758]: _type = "Task" [ 1019.843226] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.858950] env[65758]: DEBUG oslo_vmware.api [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1019.858950] env[65758]: value = "task-4660987" [ 1019.858950] env[65758]: _type = "Task" [ 1019.858950] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.860241] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660986, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.860241] env[65758]: DEBUG oslo_vmware.api [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4660983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293979} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.860241] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1019.860241] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1019.860241] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1019.860494] env[65758]: INFO nova.compute.manager [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1019.860562] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1019.864302] env[65758]: DEBUG nova.compute.manager [-] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1019.864504] env[65758]: DEBUG nova.network.neutron [-] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1019.864688] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1019.865274] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1019.866171] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1019.883169] env[65758]: DEBUG oslo_vmware.api [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660987, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.892816] env[65758]: DEBUG nova.network.neutron [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1019.917797] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1020.017506] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660984, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.030280] env[65758]: WARNING neutronclient.v2_0.client [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1020.031869] env[65758]: WARNING openstack [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1020.035026] env[65758]: WARNING openstack [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1020.065272] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90396398-f461-4b14-9be9-2cf729651324 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.075025] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae6d3a8-e2f3-4234-951d-038b85ca2534 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.128261] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbc41b7-f845-4b94-b19c-b6de94c4e02a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.141400] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6d2651-f3c1-43e2-b9e1-e78495a56574 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.156042] env[65758]: DEBUG nova.compute.provider_tree [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc 
tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.178115] env[65758]: DEBUG oslo_vmware.api [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660979, 'name': ReconfigVM_Task, 'duration_secs': 0.543354} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.178509] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfigured VM instance instance-0000003b to attach disk [datastore1] volume-e29ad381-7d88-46b8-b08e-180dc4b43679/volume-e29ad381-7d88-46b8-b08e-180dc4b43679.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.185950] env[65758]: DEBUG nova.network.neutron [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Updating instance_info_cache with network_info: [{"id": "4f1651f5-5511-4231-b401-c0eb4cb9f9eb", "address": "fa:16:3e:d3:2d:27", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1651f5-55", "ovs_interfaceid": "4f1651f5-5511-4231-b401-c0eb4cb9f9eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1020.187558] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed081e13-8c9e-407d-9e82-5b72eaa1f43e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.209425] env[65758]: DEBUG oslo_vmware.api [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1020.209425] env[65758]: value = "task-4660988" [ 1020.209425] env[65758]: _type = "Task" [ 1020.209425] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.224281] env[65758]: DEBUG oslo_vmware.api [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660988, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.355259] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660986, 'name': Rename_Task, 'duration_secs': 0.293295} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.355581] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1020.355821] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d0f14b0-d4f5-44ca-b073-27a170f4bbd1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.368134] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1020.368134] env[65758]: value = "task-4660989" [ 1020.368134] env[65758]: _type = "Task" [ 1020.368134] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.375800] env[65758]: DEBUG oslo_vmware.api [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4660987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252319} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.376538] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.376727] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1020.377445] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1020.377445] env[65758]: INFO nova.compute.manager [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1020.377445] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1020.377593] env[65758]: DEBUG nova.compute.manager [-] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1020.377669] env[65758]: DEBUG nova.network.neutron [-] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1020.377892] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1020.378489] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1020.378751] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1020.389436] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660989, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.433476] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1020.517028] env[65758]: DEBUG oslo_vmware.api [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4660984, 'name': PowerOnVM_Task, 'duration_secs': 0.563336} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.517028] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1020.517028] env[65758]: INFO nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Took 9.30 seconds to spawn the instance on the hypervisor. [ 1020.517028] env[65758]: DEBUG nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1020.517529] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1c2857-76d2-4bf0-af9c-12f89b98b06b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.658942] env[65758]: DEBUG nova.scheduler.client.report [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1020.699986] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "refresh_cache-b50b7e64-6f7f-4abc-a4b1-93408a723298" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.700407] env[65758]: DEBUG nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Instance network_info: |[{"id": "4f1651f5-5511-4231-b401-c0eb4cb9f9eb", "address": "fa:16:3e:d3:2d:27", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": 
"br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1651f5-55", "ovs_interfaceid": "4f1651f5-5511-4231-b401-c0eb4cb9f9eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1020.700885] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:2d:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f1651f5-5511-4231-b401-c0eb4cb9f9eb', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.709193] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1020.709466] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.709702] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12952965-eb8e-49b1-82db-619ecf0cde7a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.736898] env[65758]: DEBUG oslo_vmware.api [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4660988, 'name': ReconfigVM_Task, 'duration_secs': 0.186037} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.739696] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910014', 'volume_id': 'e29ad381-7d88-46b8-b08e-180dc4b43679', 'name': 'volume-e29ad381-7d88-46b8-b08e-180dc4b43679', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49', 'attached_at': '', 'detached_at': '', 'volume_id': 'e29ad381-7d88-46b8-b08e-180dc4b43679', 'serial': 'e29ad381-7d88-46b8-b08e-180dc4b43679'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1020.741057] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.741057] env[65758]: value = "task-4660990" [ 1020.741057] env[65758]: _type = "Task" [ 1020.741057] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.743326] env[65758]: DEBUG nova.compute.manager [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Received event network-changed-4f1651f5-5511-4231-b401-c0eb4cb9f9eb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1020.743326] env[65758]: DEBUG nova.compute.manager [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Refreshing instance network info cache due to event network-changed-4f1651f5-5511-4231-b401-c0eb4cb9f9eb. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1020.743326] env[65758]: DEBUG oslo_concurrency.lockutils [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] Acquiring lock "refresh_cache-b50b7e64-6f7f-4abc-a4b1-93408a723298" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.743326] env[65758]: DEBUG oslo_concurrency.lockutils [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] Acquired lock "refresh_cache-b50b7e64-6f7f-4abc-a4b1-93408a723298" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.743326] env[65758]: DEBUG nova.network.neutron [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Refreshing network info cache for port 4f1651f5-5511-4231-b401-c0eb4cb9f9eb {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1020.756546] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660990, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.883522] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660989, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.984964] env[65758]: DEBUG nova.network.neutron [-] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1021.016244] env[65758]: DEBUG nova.compute.manager [req-e7fe2c85-80b6-45a5-b69c-c43b22814454 req-5c12db1a-8c41-4fb3-a43b-c1646458ed9c service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Received event network-vif-deleted-f597ff54-9371-4703-893c-3b7ad96d394d {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1021.016244] env[65758]: INFO nova.compute.manager [req-e7fe2c85-80b6-45a5-b69c-c43b22814454 req-5c12db1a-8c41-4fb3-a43b-c1646458ed9c service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Neutron deleted interface f597ff54-9371-4703-893c-3b7ad96d394d; detaching it from the instance and deleting it from the info cache [ 1021.016331] env[65758]: DEBUG nova.network.neutron [req-e7fe2c85-80b6-45a5-b69c-c43b22814454 req-5c12db1a-8c41-4fb3-a43b-c1646458ed9c service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1021.027850] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "63b744d2-541a-42e3-9717-b06a4459fd50" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.028541] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.028541] env[65758]: INFO nova.compute.manager [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Shelving [ 1021.045463] env[65758]: INFO nova.compute.manager [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Took 19.02 seconds to build instance. 
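The "Acquiring lock" / "acquired ... waited" / "released ... held" lines throughout this trace come from oslo.concurrency's lockutils. A minimal sketch of the two usage patterns that produce them (the lock names and guarded functions below are illustrative placeholders, not taken from this log):

    from oslo_concurrency import lockutils

    # Decorator form: the "inner" wrapper logs "Lock ... acquired by ... waited Ns"
    # before the call and "... released ... held Ns" after it.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # body runs while holding the "compute_resources" lock

    # Context-manager form: logs "Acquiring lock", "Acquired lock" and
    # "Releasing lock", like the refresh_cache-* lines above.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # refresh the network info cache under the lock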
[ 1021.166830] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.167547] env[65758]: DEBUG nova.compute.manager [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1021.171271] env[65758]: DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.210s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.171573] env[65758]: DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.174536] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.352s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.174642] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.210076] env[65758]: INFO nova.scheduler.client.report [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Deleted allocations for instance 95509bbe-5aaf-471f-97b3-8a3085797568 [ 1021.210076] env[65758]: INFO nova.scheduler.client.report [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted allocations for instance be3de9bd-da98-4c7e-ad7c-933245523695 [ 1021.217258] env[65758]: WARNING oslo_messaging._drivers.amqpdriver [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1021.251189] env[65758]: WARNING neutronclient.v2_0.client [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1021.252570] env[65758]: WARNING openstack [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1021.255168] env[65758]: WARNING openstack [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1021.291052] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660990, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.390668] env[65758]: DEBUG oslo_vmware.api [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4660989, 'name': PowerOnVM_Task, 'duration_secs': 0.575928} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.395024] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1021.395024] env[65758]: INFO nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Took 7.65 seconds to spawn the instance on the hypervisor. [ 1021.395024] env[65758]: DEBUG nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1021.395024] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98527746-96e2-411f-8fca-bfb3f16df169 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.486673] env[65758]: DEBUG nova.network.neutron [-] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1021.487030] env[65758]: INFO nova.compute.manager [-] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Took 1.62 seconds to deallocate network for instance. 
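The CreateVM_Task / PowerOnVM_Task lines above are oslo.vmware's task-wait loop polling vCenter until the task completes. A rough sketch of driving the same pattern directly with an oslo.vmware session (endpoint, credentials and the moref value are placeholders, not from this log):

    from oslo_vmware import api, vim_util

    # Placeholder vCenter endpoint and credentials.
    session = api.VMwareAPISession('vc.example.invalid', 'svc-user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Managed-object reference for an existing VM (value is illustrative).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the vSphere task and block until it finishes; wait_for_task()
    # polls the task and logs the "progress is N%" lines seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)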
[ 1021.488852] env[65758]: WARNING neutronclient.v2_0.client [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1021.489787] env[65758]: WARNING openstack [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1021.490261] env[65758]: WARNING openstack [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1021.520087] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c5bcdea-d534-45f4-8cd9-1944851d08a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.538821] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c629224-921d-40d3-a0e9-991e6b0296aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.554853] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d7a50a99-1569-4524-97f4-983fdaf87c39 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.539s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.590107] env[65758]: DEBUG nova.compute.manager [req-e7fe2c85-80b6-45a5-b69c-c43b22814454 req-5c12db1a-8c41-4fb3-a43b-c1646458ed9c service nova] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Detach interface failed, port_id=f597ff54-9371-4703-893c-3b7ad96d394d, reason: Instance df46c28d-7cbd-490e-8db2-9730e4d9f953 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1021.595174] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.595467] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.595704] env[65758]: INFO nova.compute.manager [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Rebooting instance [ 1021.601108] env[65758]: DEBUG nova.network.neutron [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Updated VIF entry in instance network info cache for port 4f1651f5-5511-4231-b401-c0eb4cb9f9eb. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1021.601461] env[65758]: DEBUG nova.network.neutron [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Updating instance_info_cache with network_info: [{"id": "4f1651f5-5511-4231-b401-c0eb4cb9f9eb", "address": "fa:16:3e:d3:2d:27", "network": {"id": "020b90a8-a4ed-4336-880e-d978398b08e2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1587068247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd550f85853f447bb91a89b6bc6c5720", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1651f5-55", "ovs_interfaceid": "4f1651f5-5511-4231-b401-c0eb4cb9f9eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1021.680360] env[65758]: DEBUG nova.compute.utils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1021.682669] env[65758]: DEBUG nova.compute.manager [None 
req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1021.682873] env[65758]: DEBUG nova.network.neutron [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1021.683219] env[65758]: WARNING neutronclient.v2_0.client [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1021.683531] env[65758]: WARNING neutronclient.v2_0.client [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1021.684192] env[65758]: WARNING openstack [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1021.684695] env[65758]: WARNING openstack [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1021.734502] env[65758]: DEBUG nova.policy [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd07b5ba2c3ef430293fbf39148961763', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bad3e3c7054c424a800cb12e9c5dbb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1021.737430] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e97d06d8-640c-40f6-8b13-4688d1f79a5d tempest-ServerMetadataNegativeTestJSON-665928713 tempest-ServerMetadataNegativeTestJSON-665928713-project-member] Lock "95509bbe-5aaf-471f-97b3-8a3085797568" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.044s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.739172] env[65758]: 
DEBUG oslo_concurrency.lockutils [None req-234883a2-e022-4f43-901c-e3989c86749f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "be3de9bd-da98-4c7e-ad7c-933245523695" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.520s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.765145] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660990, 'name': CreateVM_Task, 'duration_secs': 0.547886} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.765145] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1021.765145] env[65758]: WARNING neutronclient.v2_0.client [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1021.765416] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.765653] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.766119] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1021.766711] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9645de6-27eb-4395-b876-46eed6c87175 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.772739] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1021.772739] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5282fa23-b980-2f54-5c58-05cef854667a" [ 1021.772739] env[65758]: _type = "Task" [ 1021.772739] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.783344] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5282fa23-b980-2f54-5c58-05cef854667a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.819948] env[65758]: DEBUG nova.objects.instance [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lazy-loading 'flavor' on Instance uuid 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.917691] env[65758]: INFO nova.compute.manager [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Took 18.68 seconds to build instance. [ 1021.988043] env[65758]: INFO nova.compute.manager [-] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Took 1.61 seconds to deallocate network for instance. [ 1022.005893] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.006312] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.006559] env[65758]: DEBUG nova.objects.instance [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'resources' on Instance uuid 7f5911fb-785e-444c-9408-c6884e06c5d3 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.062273] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.062662] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ee709f6-e852-4f42-a242-892b0e52d6cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.071816] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1022.071816] env[65758]: value = "task-4660991" [ 1022.071816] env[65758]: _type = "Task" [ 1022.071816] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.088483] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660991, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.104623] env[65758]: DEBUG oslo_concurrency.lockutils [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] Releasing lock "refresh_cache-b50b7e64-6f7f-4abc-a4b1-93408a723298" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.105161] env[65758]: DEBUG nova.compute.manager [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Received event network-vif-deleted-1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1022.105431] env[65758]: INFO nova.compute.manager [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Neutron deleted interface 1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea; detaching it from the instance and deleting it from the info cache [ 1022.105575] env[65758]: DEBUG nova.network.neutron [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1022.126022] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.126022] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquired lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.126692] env[65758]: DEBUG nova.network.neutron [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1022.129140] env[65758]: DEBUG nova.network.neutron [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Successfully created port: 4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1022.188735] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 
tempest-ServerActionsTestOtherB-1402960202-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b5d3b-9582-d7aa-54ec-ca331b1a9e34/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1022.190190] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c78adf8-48cb-4beb-9fbc-bacbe0da0151 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.193598] env[65758]: DEBUG nova.compute.manager [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1022.203570] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b5d3b-9582-d7aa-54ec-ca331b1a9e34/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1022.203851] env[65758]: ERROR oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b5d3b-9582-d7aa-54ec-ca331b1a9e34/disk-0.vmdk due to incomplete transfer. [ 1022.204129] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-124045be-358c-4c61-a6a3-bf00ab658410 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.215509] env[65758]: DEBUG oslo_vmware.rw_handles [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b5d3b-9582-d7aa-54ec-ca331b1a9e34/disk-0.vmdk. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1022.215732] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Uploaded image fcf58575-c665-48d7-add3-26ecbec71675 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1022.218864] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1022.219249] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8c5a714c-5b43-46e3-af78-0d851481ea1a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.231154] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1022.231154] env[65758]: value = "task-4660992" [ 1022.231154] env[65758]: _type = "Task" [ 1022.231154] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.243724] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660992, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.285636] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5282fa23-b980-2f54-5c58-05cef854667a, 'name': SearchDatastore_Task, 'duration_secs': 0.023165} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.285994] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.286317] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1022.286564] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.286711] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.286923] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1022.287263] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b9f72d1-e213-49a3-9a35-a911d2a1a89d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.310310] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1022.310564] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1022.311399] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63c1c554-d7af-4cd6-be31-a97df86c95d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.319760] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1022.319760] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52794f55-e13c-7ae4-9ebb-ce4e2549cfbe" [ 1022.319760] env[65758]: _type = "Task" [ 1022.319760] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.326779] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2e9e50e2-aba0-4b3c-bf23-1bceb7474ef9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.402s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.332169] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52794f55-e13c-7ae4-9ebb-ce4e2549cfbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.419489] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0297a917-4cd0-4e6d-996b-8c1c77f82998 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "9ec1ff52-7fbd-4530-9377-caeff103360b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.195s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.495556] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.589550] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660991, 'name': PowerOffVM_Task, 'duration_secs': 0.455935} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.590704] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1022.591011] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad77df78-7f66-4cd1-b9d5-9d87913f2c4f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.617979] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56b3723a-461f-4054-8b04-a8283ae8a9d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.620700] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c75671-037d-4387-aa06-866b83db5e9a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.638410] env[65758]: WARNING neutronclient.v2_0.client [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1022.639316] env[65758]: WARNING openstack [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1022.639677] env[65758]: WARNING openstack [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1022.657492] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d468b41-2c63-455f-a4e9-98feca373576 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.696494] env[65758]: DEBUG nova.compute.manager [req-0c782a10-ef41-4ea6-a4d4-d3e319e4ecd3 req-6d1bdaf1-dc6c-4010-889e-267db0a7e56d service nova] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Detach interface failed, port_id=1cb28e5b-cbe4-4e1a-beb4-2de6e7d248ea, reason: Instance 7f5911fb-785e-444c-9408-c6884e06c5d3 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1022.750926] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660992, 'name': Destroy_Task} progress is 33%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.832930] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52794f55-e13c-7ae4-9ebb-ce4e2549cfbe, 'name': SearchDatastore_Task, 'duration_secs': 0.017156} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.837210] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fa0d0ca-d2b5-46d9-b834-5b194a33f7d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.845315] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1022.845315] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522e6918-3470-4895-c12a-0c1627994f10" [ 1022.845315] env[65758]: _type = "Task" [ 1022.845315] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.862953] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522e6918-3470-4895-c12a-0c1627994f10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.934029] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa63d85-0fb7-434b-a85b-bfddb4f416e1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.942573] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e41221-72f3-459b-982a-7d25d6d19081 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.982559] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d8718d-df92-4e8e-b01b-67a52ca26283 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.992029] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bdecdc-4c9d-4283-b630-4d4d09174ddd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.009988] env[65758]: DEBUG nova.compute.provider_tree [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.171624] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 
63b744d2-541a-42e3-9717-b06a4459fd50] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1023.172288] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-257f9b04-d8de-48a5-a378-c53ecdf2c34e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.182728] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1023.182728] env[65758]: value = "task-4660993" [ 1023.182728] env[65758]: _type = "Task" [ 1023.182728] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.192797] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660993, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.206565] env[65758]: DEBUG nova.compute.manager [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1023.249017] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1023.249386] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1023.249587] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1023.249841] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor pref 0:0:0 
{{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1023.250055] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1023.250565] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1023.250898] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1023.251147] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1023.251408] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1023.251704] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1023.251898] env[65758]: DEBUG nova.virt.hardware [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1023.253368] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd209341-6fbf-4039-91e0-4c4c8ad1e56a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.265563] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660992, 'name': Destroy_Task, 'duration_secs': 0.722924} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.265563] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Destroyed the VM [ 1023.265563] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1023.265563] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a62dbfdb-dacf-42d8-bb8e-1080dfeb6685 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.270535] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262712de-5888-4f19-8164-acb3b0a7c6ec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.278373] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1023.278373] env[65758]: value = "task-4660994" [ 1023.278373] env[65758]: _type = "Task" [ 1023.278373] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.298257] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660994, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.356703] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522e6918-3470-4895-c12a-0c1627994f10, 'name': SearchDatastore_Task, 'duration_secs': 0.014606} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.358127] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.358127] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] b50b7e64-6f7f-4abc-a4b1-93408a723298/b50b7e64-6f7f-4abc-a4b1-93408a723298.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1023.358127] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d8bb700-225a-408e-80a2-59d794034255 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.365434] env[65758]: INFO nova.compute.manager [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Rescuing [ 1023.365601] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.365708] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.365847] env[65758]: DEBUG nova.network.neutron [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1023.367257] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1023.367257] env[65758]: value = "task-4660995" [ 1023.367257] env[65758]: _type = "Task" [ 1023.367257] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.377863] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660995, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.513500] env[65758]: DEBUG nova.scheduler.client.report [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1023.697751] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660993, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.794137] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660994, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.874489] env[65758]: WARNING neutronclient.v2_0.client [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1023.875386] env[65758]: WARNING openstack [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1023.875859] env[65758]: WARNING openstack [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1023.891581] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660995, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.903609] env[65758]: DEBUG nova.network.neutron [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Successfully updated port: 4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1024.021140] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.015s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.024592] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.529s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.025406] env[65758]: DEBUG nova.objects.instance [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'resources' on Instance uuid df46c28d-7cbd-490e-8db2-9730e4d9f953 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.055433] env[65758]: INFO nova.scheduler.client.report [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Deleted allocations for instance 7f5911fb-785e-444c-9408-c6884e06c5d3 [ 1024.090385] env[65758]: WARNING neutronclient.v2_0.client [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1024.090708] env[65758]: WARNING openstack [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1024.091801] env[65758]: WARNING openstack [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1024.194104] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660993, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.258333] env[65758]: DEBUG nova.network.neutron [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Updating instance_info_cache with network_info: [{"id": "a6572553-5e5a-4fb9-9384-1cfcd168710b", "address": "fa:16:3e:21:ca:02", "network": {"id": "729a6c0b-369b-4f6f-9ac6-aa6347bc5a19", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1604192611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5566280dda3a4e6fa6821d7ef711c108", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6572553-5e", "ovs_interfaceid": "a6572553-5e5a-4fb9-9384-1cfcd168710b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1024.291425] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660994, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.346206] env[65758]: WARNING neutronclient.v2_0.client [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1024.346906] env[65758]: WARNING openstack [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1024.347278] env[65758]: WARNING openstack [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1024.387931] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660995, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659298} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.388561] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] b50b7e64-6f7f-4abc-a4b1-93408a723298/b50b7e64-6f7f-4abc-a4b1-93408a723298.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1024.388913] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1024.389335] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da0f1ecf-c6f7-4724-9f45-476808720906 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.401951] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1024.401951] env[65758]: value = "task-4660996" [ 1024.401951] env[65758]: _type = "Task" [ 1024.401951] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.407143] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.407361] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.407560] env[65758]: DEBUG nova.network.neutron [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1024.416240] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660996, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.565660] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed075291-300a-45dc-bc89-497d81db7a35 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "7f5911fb-785e-444c-9408-c6884e06c5d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.421s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.573059] env[65758]: DEBUG nova.network.neutron [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [{"id": "216bffab-4451-407d-b8dd-9e8687a90b81", "address": "fa:16:3e:74:3c:81", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216bffab-44", "ovs_interfaceid": "216bffab-4451-407d-b8dd-9e8687a90b81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1024.701757] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660993, 'name': CreateSnapshot_Task, 'duration_secs': 1.227063} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.702267] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1024.703602] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52aadc80-95a2-4e79-9026-4b21e6190658 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.722036] env[65758]: DEBUG nova.compute.manager [req-8ea37fc4-30e6-4fa8-bb9e-6430e09af12b req-9dad1aac-2afa-4898-b266-94945962aa6c service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Received event network-vif-plugged-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1024.722036] env[65758]: DEBUG oslo_concurrency.lockutils [req-8ea37fc4-30e6-4fa8-bb9e-6430e09af12b req-9dad1aac-2afa-4898-b266-94945962aa6c service nova] Acquiring lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.723989] env[65758]: DEBUG oslo_concurrency.lockutils [req-8ea37fc4-30e6-4fa8-bb9e-6430e09af12b req-9dad1aac-2afa-4898-b266-94945962aa6c service nova] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.723989] env[65758]: DEBUG oslo_concurrency.lockutils [req-8ea37fc4-30e6-4fa8-bb9e-6430e09af12b req-9dad1aac-2afa-4898-b266-94945962aa6c service nova] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.723989] env[65758]: DEBUG nova.compute.manager [req-8ea37fc4-30e6-4fa8-bb9e-6430e09af12b req-9dad1aac-2afa-4898-b266-94945962aa6c service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] No waiting events found dispatching network-vif-plugged-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1024.723989] env[65758]: WARNING nova.compute.manager [req-8ea37fc4-30e6-4fa8-bb9e-6430e09af12b req-9dad1aac-2afa-4898-b266-94945962aa6c service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Received unexpected event network-vif-plugged-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 for instance with vm_state building and task_state spawning. 
[ 1024.761743] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Releasing lock "refresh_cache-fe6f2a15-f42a-4f63-8dfa-175adadf5c02" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.801034] env[65758]: DEBUG oslo_vmware.api [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4660994, 'name': RemoveSnapshot_Task, 'duration_secs': 1.160779} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.804391] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1024.804661] env[65758]: INFO nova.compute.manager [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Took 16.74 seconds to snapshot the instance on the hypervisor. [ 1024.913185] env[65758]: WARNING openstack [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1024.913590] env[65758]: WARNING openstack [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1024.921396] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660996, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156775} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.923478] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1024.924365] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0e003e-3ec2-4e6f-a1fb-89ec6840333e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.928583] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e59988-360d-48b4-bf25-6ffa518a529b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.953124] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f84eb8-14af-4fec-a060-e0a3c87adcee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.966253] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] b50b7e64-6f7f-4abc-a4b1-93408a723298/b50b7e64-6f7f-4abc-a4b1-93408a723298.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1024.967349] env[65758]: DEBUG nova.network.neutron [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1024.969466] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9701bcc3-8155-461d-b6fc-d3a728a7185c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.019747] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a5aa44-9f84-474b-9465-acad6d81bac9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.023204] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1025.023204] env[65758]: value = "task-4660997" [ 1025.023204] env[65758]: _type = "Task" [ 1025.023204] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.031379] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d869f41c-df01-41e7-8e55-a83ef16fb42a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.042164] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660997, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.053415] env[65758]: DEBUG nova.compute.provider_tree [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.076551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.110618] env[65758]: WARNING neutronclient.v2_0.client [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1025.111043] env[65758]: WARNING openstack [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1025.111406] env[65758]: WARNING openstack [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1025.232716] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1025.233045] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ed5a8f7f-f919-4926-9e28-31fbc07b5622 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.243662] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1025.243662] env[65758]: value = "task-4660998" [ 1025.243662] env[65758]: _type = "Task" [ 1025.243662] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.246257] env[65758]: DEBUG nova.network.neutron [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Updating instance_info_cache with network_info: [{"id": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "address": "fa:16:3e:c6:9a:3f", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4288a41e-2e", "ovs_interfaceid": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1025.261067] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660998, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.267724] env[65758]: DEBUG nova.compute.manager [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1025.268639] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a88949-669b-4bd7-b7df-97facd0bf2e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.374405] env[65758]: DEBUG nova.compute.manager [None req-1d3ee7dc-2ce1-44c6-ade5-64881a17988f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Found 1 images (rotation: 2) {{(pid=65758) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5057}} [ 1025.535608] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660997, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.559922] env[65758]: DEBUG nova.scheduler.client.report [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1025.754726] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.755340] env[65758]: DEBUG nova.compute.manager [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Instance network_info: |[{"id": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "address": "fa:16:3e:c6:9a:3f", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4288a41e-2e", "ovs_interfaceid": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1025.756028] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:9a:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4288a41e-2eac-4d34-9eb1-c0e6a398cbe1', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1025.769149] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc 
tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1025.770155] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1025.770507] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50a5f465-dea9-44db-814d-a00b885bd691 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.806654] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660998, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.818826] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1025.818826] env[65758]: value = "task-4660999" [ 1025.818826] env[65758]: _type = "Task" [ 1025.818826] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.829819] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660999, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.903360] env[65758]: DEBUG nova.compute.manager [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1025.904780] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3665f31b-8485-4dbf-a83d-73e954ffd919 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.042567] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4660997, 'name': ReconfigVM_Task, 'duration_secs': 0.768655} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.042861] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Reconfigured VM instance instance-0000005d to attach disk [datastore1] b50b7e64-6f7f-4abc-a4b1-93408a723298/b50b7e64-6f7f-4abc-a4b1-93408a723298.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.043591] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64bb1a26-4528-44ea-b5eb-e7319c0ab93d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.053920] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1026.053920] env[65758]: value = "task-4661000" [ 1026.053920] env[65758]: _type = "Task" [ 1026.053920] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.067647] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661000, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.067647] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.041s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.093137] env[65758]: INFO nova.scheduler.client.report [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted allocations for instance df46c28d-7cbd-490e-8db2-9730e4d9f953 [ 1026.261200] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660998, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.309766] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256481bf-e244-475e-bad3-e1380efa1a33 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.319101] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Doing hard reboot of VM {{(pid=65758) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1026.323825] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-8db07430-e597-4542-9997-b4408a0ffe22 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.336226] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4660999, 'name': CreateVM_Task, 'duration_secs': 0.519021} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.338287] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.339048] env[65758]: DEBUG oslo_vmware.api [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1026.339048] env[65758]: value = "task-4661001" [ 1026.339048] env[65758]: _type = "Task" [ 1026.339048] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.339874] env[65758]: WARNING neutronclient.v2_0.client [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
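Aside for readers tracing the CreateVM_Task entries above: the "invoke a task, then poll until it completes" pattern that produces the repeated "_poll_task ... progress is N%" and "completed successfully" lines comes from oslo.vmware's session API. A minimal sketch of that pattern follows; the function and variable names are illustrative, and the folder, config-spec and resource-pool references are assumed to be obtained elsewhere (they are not values taken from this log).

    from oslo_vmware import api as vmware_api

    def create_vm_and_wait(session, vm_folder_ref, config_spec, resource_pool_ref):
        """Invoke CreateVM_Task and block until vCenter reports completion."""
        task = session.invoke_api(
            session.vim, 'CreateVM_Task', vm_folder_ref,
            config=config_spec, pool=resource_pool_ref)
        # wait_for_task() drives the "progress is N%" polling and raises if the
        # task ends in error; its result is the new VM's managed object reference.
        task_info = session.wait_for_task(task)
        return task_info.result

    # Hypothetical session setup (endpoint and credentials are placeholders):
    # session = vmware_api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)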
[ 1026.340430] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.340728] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.341179] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1026.341684] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5da7c679-f86e-4f7c-93a3-24fdf6767ca5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.354393] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1026.354393] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f4e19b-e545-a130-a708-455f3ee63dd2" [ 1026.354393] env[65758]: _type = "Task" [ 1026.354393] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.359434] env[65758]: DEBUG oslo_vmware.api [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4661001, 'name': ResetVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.369127] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f4e19b-e545-a130-a708-455f3ee63dd2, 'name': SearchDatastore_Task, 'duration_secs': 0.010836} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.369127] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.369841] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1026.369841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.369841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.369841] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.370211] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aaf8d66a-ce33-4359-a6e7-5005aa41a979 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.381424] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.381726] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1026.382730] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ca768b-3cf5-4b82-877a-ce5feca5ea2a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.390400] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1026.390400] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522502d5-9c50-9eaf-4f4f-a7be21d276d0" [ 1026.390400] env[65758]: _type = "Task" [ 1026.390400] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.400201] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522502d5-9c50-9eaf-4f4f-a7be21d276d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.418639] env[65758]: INFO nova.compute.manager [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] instance snapshotting [ 1026.421765] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c0070e-d087-4396-bdf8-f19d4e925ce9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.443397] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cad78b-c1e3-4f95-9db8-cc0312f7dae8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.534520] env[65758]: DEBUG nova.compute.manager [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1026.567154] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661000, 'name': Rename_Task, 'duration_secs': 0.19481} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.567409] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1026.567722] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c90a4ec-825e-4468-9303-5d47f1c13a25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.577172] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1026.577172] env[65758]: value = "task-4661002" [ 1026.577172] env[65758]: _type = "Task" [ 1026.577172] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.587819] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661002, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.603291] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c806cecb-f369-4d4c-8a9d-ae386863c579 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "df46c28d-7cbd-490e-8db2-9730e4d9f953" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.941s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.611415] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.611850] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f7f1e68-dd3e-4b8c-b488-663bcb92b05a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.621282] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1026.621282] env[65758]: value = "task-4661003" [ 1026.621282] env[65758]: _type = "Task" [ 1026.621282] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.631737] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661003, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.756638] env[65758]: DEBUG nova.compute.manager [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Received event network-changed-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1026.757499] env[65758]: DEBUG nova.compute.manager [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Refreshing instance network info cache due to event network-changed-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1026.757499] env[65758]: DEBUG oslo_concurrency.lockutils [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] Acquiring lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.757499] env[65758]: DEBUG oslo_concurrency.lockutils [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] Acquired lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.757499] env[65758]: DEBUG nova.network.neutron [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Refreshing network info cache for port 4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1026.762512] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4660998, 'name': CloneVM_Task, 'duration_secs': 1.441561} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.763716] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Created linked-clone VM from snapshot [ 1026.764372] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8015acc-ad14-49d2-a169-1d3acbee3d72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.774905] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Uploading image df203c52-cb8e-4277-903c-c114ae8627be {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1026.810158] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1026.810158] env[65758]: value = "vm-910018" [ 1026.810158] env[65758]: _type = "VirtualMachine" [ 1026.810158] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1026.810515] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ddd85c15-db20-448f-beac-e6a60cb844d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.820521] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lease: (returnval){ [ 1026.820521] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52db7111-64dc-50f1-cd96-fb0ae78b00cd" [ 1026.820521] env[65758]: _type = "HttpNfcLease" [ 1026.820521] env[65758]: } obtained for exporting VM: (result){ [ 1026.820521] env[65758]: value = "vm-910018" [ 1026.820521] env[65758]: _type = "VirtualMachine" [ 1026.820521] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1026.821032] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the lease: (returnval){ [ 1026.821032] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52db7111-64dc-50f1-cd96-fb0ae78b00cd" [ 1026.821032] env[65758]: _type = "HttpNfcLease" [ 1026.821032] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1026.832430] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1026.832430] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52db7111-64dc-50f1-cd96-fb0ae78b00cd" [ 1026.832430] env[65758]: _type = "HttpNfcLease" [ 1026.832430] env[65758]: } is initializing. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1026.852632] env[65758]: DEBUG oslo_vmware.api [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4661001, 'name': ResetVM_Task, 'duration_secs': 0.108024} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.852758] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Did hard reboot of VM {{(pid=65758) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1026.852941] env[65758]: DEBUG nova.compute.manager [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1026.853935] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbfc043-9d1f-4848-837b-3dc957a5351e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.903826] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522502d5-9c50-9eaf-4f4f-a7be21d276d0, 'name': SearchDatastore_Task, 'duration_secs': 0.022367} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.908220] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f33ab9c-cc11-4654-9701-434f1f60ae1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.914073] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1026.914073] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5266c367-b1cd-5741-62f4-c37ca8eb9fdf" [ 1026.914073] env[65758]: _type = "Task" [ 1026.914073] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.928181] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5266c367-b1cd-5741-62f4-c37ca8eb9fdf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.958075] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1026.958519] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-33a4e6f9-9a98-4793-b98c-9826c86f3ba5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.968638] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1026.968638] env[65758]: value = "task-4661005" [ 1026.968638] env[65758]: _type = "Task" [ 1026.968638] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.980048] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661005, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.058676] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.058676] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.090102] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661002, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.131861] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661003, 'name': PowerOffVM_Task, 'duration_secs': 0.233727} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.132260] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.133679] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18077354-7646-4489-8569-9486f704417a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.157392] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0f3897-0f57-440b-a159-80c3ee7256c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.192439] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.192439] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3a77314-e0fa-41e0-b01f-3ffdd9cd4695 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.202824] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1027.202824] env[65758]: value = "task-4661006" [ 1027.202824] env[65758]: _type = "Task" [ 1027.202824] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.213839] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1027.214111] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.214829] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.214829] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.214829] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.215057] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b996706-67b8-4ee3-996f-34136669a927 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.227392] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.228146] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.229529] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-503c9db6-d823-4e83-9cd1-afc45ef3467b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.240199] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1027.240199] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52273c7b-1ac6-2434-cd0c-054aec7216bb" [ 1027.240199] env[65758]: _type = "Task" [ 1027.240199] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.252818] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52273c7b-1ac6-2434-cd0c-054aec7216bb, 'name': SearchDatastore_Task, 'duration_secs': 0.011251} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.253791] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-229539db-4439-4fe0-8e8a-65f21a92c3bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.260972] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1027.260972] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52203c6b-ec79-2edc-c9e0-890ecd52dcf9" [ 1027.260972] env[65758]: _type = "Task" [ 1027.260972] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.265074] env[65758]: WARNING neutronclient.v2_0.client [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
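Aside: the repeated "Acquiring lock" / "Acquired lock" / "Releasing lock" entries around the devstack-image-cache_base VMDK come from oslo.concurrency's lockutils, which serialises access to the cached image. A minimal sketch of the context-manager form follows; the lock name is copied from the log only to make the example concrete, and the protected body is a placeholder.

    from oslo_concurrency import lockutils

    CACHE_VMDK = ('[datastore1] devstack-image-cache_base/'
                  '75a6399b-5100-4c51-b5cf-162bd505a28f/'
                  '75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk')

    def with_image_cache_lock():
        # lockutils.lock() emits the Acquiring/Acquired/Releasing DEBUG lines
        # seen throughout this log while the critical section runs.
        with lockutils.lock(CACHE_VMDK):
            pass  # e.g. search for, or copy, the cached VMDK while protected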
[ 1027.265609] env[65758]: WARNING openstack [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1027.266100] env[65758]: WARNING openstack [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1027.281716] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52203c6b-ec79-2edc-c9e0-890ecd52dcf9, 'name': SearchDatastore_Task, 'duration_secs': 0.010589} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.282162] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.282390] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. {{(pid=65758) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1027.282674] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0be93462-8307-460a-b806-e5debe66e5e5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.291450] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1027.291450] env[65758]: value = "task-4661007" [ 1027.291450] env[65758]: _type = "Task" [ 1027.291450] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.301908] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661007, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.330017] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1027.330017] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52db7111-64dc-50f1-cd96-fb0ae78b00cd" [ 1027.330017] env[65758]: _type = "HttpNfcLease" [ 1027.330017] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1027.330454] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1027.330454] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52db7111-64dc-50f1-cd96-fb0ae78b00cd" [ 1027.330454] env[65758]: _type = "HttpNfcLease" [ 1027.330454] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1027.331399] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154cc638-cb9b-471d-8819-159e9e788e57 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.343299] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212a21f-c0fc-c50e-edef-bc796d1ba8d5/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1027.343539] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212a21f-c0fc-c50e-edef-bc796d1ba8d5/disk-0.vmdk for reading. 
{{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1027.405501] env[65758]: DEBUG nova.compute.manager [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1027.411751] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d8fc6488-4919-4ef8-8fed-608fd5953cf7 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.816s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.416027] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a67637-9536-4504-a063-aef4271621a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.434996] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5266c367-b1cd-5741-62f4-c37ca8eb9fdf, 'name': SearchDatastore_Task, 'duration_secs': 0.012084} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.436379] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.436662] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 89167b37-4c21-4678-a0f0-5a4ce932c4d8/89167b37-4c21-4678-a0f0-5a4ce932c4d8.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1027.436972] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f66cc329-d806-4129-a6e5-74c86ad7780f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.446213] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1027.446213] env[65758]: value = "task-4661008" [ 1027.446213] env[65758]: _type = "Task" [ 1027.446213] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.463986] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.471178] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a58f7b9a-55f5-4d5b-a468-205c3ba1c509 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.484899] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661005, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.541976] env[65758]: WARNING neutronclient.v2_0.client [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1027.541976] env[65758]: WARNING openstack [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1027.541976] env[65758]: WARNING openstack [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1027.567203] env[65758]: INFO nova.compute.claims [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.592138] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661002, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.669915] env[65758]: DEBUG nova.network.neutron [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Updated VIF entry in instance network info cache for port 4288a41e-2eac-4d34-9eb1-c0e6a398cbe1. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1027.670559] env[65758]: DEBUG nova.network.neutron [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Updating instance_info_cache with network_info: [{"id": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "address": "fa:16:3e:c6:9a:3f", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4288a41e-2e", "ovs_interfaceid": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1027.805657] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661007, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.937662] env[65758]: INFO nova.compute.manager [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] instance snapshotting [ 1027.938452] env[65758]: DEBUG nova.objects.instance [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'flavor' on Instance uuid 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.961585] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661008, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.982901] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661005, 'name': CreateSnapshot_Task, 'duration_secs': 0.944427} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.983116] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1027.984200] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6476c37-baf9-4906-90fc-a8452f166a49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.080535] env[65758]: INFO nova.compute.resource_tracker [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating resource usage from migration c9995e3c-d411-493e-9f28-8c93e4bf77ec [ 1028.099486] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661002, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.174059] env[65758]: DEBUG oslo_concurrency.lockutils [req-70695d71-ceaa-40e4-ab79-1a95c386502f req-04cb0f71-5a80-43ea-bb76-9b9143572118 service nova] Releasing lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.310335] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582855} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.310826] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. 
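Aside: the CopyVirtualDisk_Task entries above record the cached image being copied into the instance directory (here as a -rescue.vmdk). A sketch of how such a copy can be issued through the same session API follows; it is illustrative only, and the datastore paths are parameters rather than values fixed by this log.

    def copy_cached_disk(session, source_ds_path, dest_ds_path):
        """Copy a VMDK between datastore paths and wait for the task."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=source_ds_path, destName=dest_ds_path)
        # Produces "CopyVirtualDisk_Task ... progress is N%" polling entries
        # like the ones above until vCenter marks the task complete.
        session.wait_for_task(task)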
[ 1028.312247] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd987d3f-ee5f-4281-8542-5616394cc074 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.357638] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.362700] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b05ccfd-3b57-4170-add9-5728045ab811 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.384122] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.384831] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.393428] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1028.393428] env[65758]: value = "task-4661009" [ 1028.393428] env[65758]: _type = "Task" [ 1028.393428] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.408194] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661009, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.446075] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e376c0-1be9-43e6-b9e0-ea9328b69f5b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.464448] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661008, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.879668} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.484268] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 89167b37-4c21-4678-a0f0-5a4ce932c4d8/89167b37-4c21-4678-a0f0-5a4ce932c4d8.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1028.484747] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1028.486037] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f33be99-7e01-4cac-ba31-60e6e93c0f40 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.491680] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59b22818-2a19-41a2-9a2b-1172db6e3297 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.496894] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b1f1b9-3d18-4faf-8a70-48f0a4ff0e59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.509474] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1028.512447] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b7ad5fbf-028b-45a6-af30-ca6276f2c02f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.523500] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ca478a-c5d0-4069-98eb-327b2d2346d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.530113] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1028.530113] env[65758]: value = "task-4661010" [ 1028.530113] env[65758]: _type = "Task" [ 1028.530113] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.546698] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1028.546698] env[65758]: value = "task-4661011" [ 1028.546698] env[65758]: _type = "Task" [ 1028.546698] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.587525] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8137875d-a3cf-4698-bad4-fc6df9ca518d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.594195] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.596381] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.596381] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.596381] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.596381] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.597567] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661010, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.598169] env[65758]: INFO nova.compute.manager [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Terminating instance [ 1028.616719] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661011, 'name': CloneVM_Task} progress is 23%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.622045] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d94891-9788-4dcb-b718-1fc0ccd92234 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.632725] env[65758]: DEBUG oslo_vmware.api [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661002, 'name': PowerOnVM_Task, 'duration_secs': 1.54312} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.632725] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.632725] env[65758]: INFO nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Took 10.43 seconds to spawn the instance on the hypervisor. [ 1028.632725] env[65758]: DEBUG nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1028.633452] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e99286-9c43-4bac-b9fa-16069a2c7456 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.645898] env[65758]: DEBUG nova.compute.provider_tree [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.887453] env[65758]: DEBUG nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1028.905671] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661009, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.002750] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.002750] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.041381] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1029.045347] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1633e6cb-e2d9-4b90-a4e0-efd143663af1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.047733] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661010, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171829} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.048592] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1029.049254] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0637c3-4189-41c0-b73c-598a56e5d4ff {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.055182] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1029.055182] env[65758]: value = "task-4661012" [ 1029.055182] env[65758]: _type = "Task" [ 1029.055182] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.077531] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 89167b37-4c21-4678-a0f0-5a4ce932c4d8/89167b37-4c21-4678-a0f0-5a4ce932c4d8.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.081433] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81559543-73f5-431f-8d28-dc78100853d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.108514] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661012, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.110448] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1029.110448] env[65758]: value = "task-4661013" [ 1029.110448] env[65758]: _type = "Task" [ 1029.110448] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.117624] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661011, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.118794] env[65758]: DEBUG nova.compute.manager [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1029.119101] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1029.120108] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d68226-4703-4e26-8527-bd3d3be0474b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.127196] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661013, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.134146] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1029.134907] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71b91f97-072f-4f4b-bd6b-092c0c22d3ef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.143690] env[65758]: DEBUG oslo_vmware.api [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1029.143690] env[65758]: value = "task-4661014" [ 1029.143690] env[65758]: _type = "Task" [ 1029.143690] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.149598] env[65758]: DEBUG nova.scheduler.client.report [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1029.165042] env[65758]: DEBUG oslo_vmware.api [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4661014, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.168402] env[65758]: INFO nova.compute.manager [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Took 22.89 seconds to build instance. [ 1029.412762] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661009, 'name': ReconfigVM_Task, 'duration_secs': 0.786892} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.413146] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.414748] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b676bd69-cc9d-4116-9723-39f79b5663d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.454077] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.454595] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9980922e-8af0-443c-97d1-0c9ae80fe2bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.474504] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1029.474504] env[65758]: value = "task-4661015" [ 1029.474504] env[65758]: _type = "Task" [ 1029.474504] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.485416] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661015, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.506053] env[65758]: DEBUG nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1029.579131] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661012, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.612077] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661011, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.622347] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661013, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.656364] env[65758]: DEBUG oslo_vmware.api [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4661014, 'name': PowerOffVM_Task, 'duration_secs': 0.384132} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.657246] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.659066] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.659066] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3213edaa-d8bd-47b5-a845-b01fff18763b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.668746] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.611s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.669045] env[65758]: INFO nova.compute.manager [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Migrating [ 1029.686470] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.232s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.688944] env[65758]: INFO nova.compute.claims [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.695328] env[65758]: DEBUG oslo_concurrency.lockutils [None req-408d026c-2cfc-414d-90de-5f1cbfc092a3 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "b50b7e64-6f7f-4abc-a4b1-93408a723298" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.418s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.745819] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.745819] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.745819] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Deleting the datastore file [datastore2] fe6f2a15-f42a-4f63-8dfa-175adadf5c02 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.745819] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11816687-18b0-4e7a-a6c8-455b72ab457f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.752908] env[65758]: DEBUG oslo_vmware.api [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for the task: (returnval){ [ 1029.752908] env[65758]: value = "task-4661017" [ 1029.752908] env[65758]: _type = "Task" [ 1029.752908] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.762496] env[65758]: DEBUG oslo_vmware.api [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4661017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.986271] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661015, 'name': ReconfigVM_Task, 'duration_secs': 0.244341} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.987401] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1029.987401] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ead919a-b6e7-42e7-9746-499203c8fd2b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.997107] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1029.997107] env[65758]: value = "task-4661018" [ 1029.997107] env[65758]: _type = "Task" [ 1029.997107] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.007381] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661018, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.032808] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.080529] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661012, 'name': CreateSnapshot_Task, 'duration_secs': 0.964134} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.080529] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1030.081283] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4479546-7a1b-4a9a-8ad7-fdbd39e9114f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.111891] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661011, 'name': CloneVM_Task} progress is 95%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.133077] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661013, 'name': ReconfigVM_Task, 'duration_secs': 0.575883} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.133077] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 89167b37-4c21-4678-a0f0-5a4ce932c4d8/89167b37-4c21-4678-a0f0-5a4ce932c4d8.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1030.133077] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41c1e52b-5190-476d-9161-9c8ffa58117f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.135490] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1030.135490] env[65758]: value = "task-4661019" [ 1030.135490] env[65758]: _type = "Task" [ 1030.135490] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.155775] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661019, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.200855] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.201086] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.201434] env[65758]: DEBUG nova.network.neutron [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1030.264408] env[65758]: DEBUG oslo_vmware.api [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Task: {'id': task-4661017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.297181} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.264712] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.265067] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.265338] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.265576] env[65758]: INFO nova.compute.manager [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1030.265907] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1030.266254] env[65758]: DEBUG nova.compute.manager [-] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1030.266419] env[65758]: DEBUG nova.network.neutron [-] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1030.266847] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1030.267505] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1030.267858] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1030.310153] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
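The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines earlier in this section come from oslo.concurrency's lockutils, which Nova uses to serialize work on a single instance (the per-UUID terminate/build locks) and on this host's inventory (the "compute_resources" lock). A hedged sketch of the two idioms, with illustrative function names that are not Nova's real entry points:

    from oslo_concurrency import lockutils

    def do_terminate_instance(instance_uuid):
        # Per-instance critical section: only one request may tear down a
        # given instance at a time, matching the per-UUID locks in the log.
        with lockutils.lock(instance_uuid):
            pass  # power off, unregister, delete backing files ...

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, flavor):
        # Claims against this host's inventory are serialized, which is why
        # the log shows a claim waiting ~0.2s for "compute_resources".
        pass

The 'by "nova.compute.manager...."' suffixes in those log lines name the wrapped function that is holding each lock, which makes long hold times easy to attribute.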
[ 1030.428582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "b50b7e64-6f7f-4abc-a4b1-93408a723298" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.430250] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "b50b7e64-6f7f-4abc-a4b1-93408a723298" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.430250] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "b50b7e64-6f7f-4abc-a4b1-93408a723298-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.430250] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "b50b7e64-6f7f-4abc-a4b1-93408a723298-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.430250] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "b50b7e64-6f7f-4abc-a4b1-93408a723298-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.431858] env[65758]: INFO nova.compute.manager [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Terminating instance [ 1030.511659] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661018, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.559038] env[65758]: DEBUG nova.compute.manager [req-f70477af-47cb-481b-8d49-936c324c03aa req-17a9141d-f3c9-43d8-8fba-e2a1e190eeb5 service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Received event network-vif-deleted-a6572553-5e5a-4fb9-9384-1cfcd168710b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1030.559038] env[65758]: INFO nova.compute.manager [req-f70477af-47cb-481b-8d49-936c324c03aa req-17a9141d-f3c9-43d8-8fba-e2a1e190eeb5 service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Neutron deleted interface a6572553-5e5a-4fb9-9384-1cfcd168710b; detaching it from the instance and deleting it from the info cache [ 1030.559461] env[65758]: DEBUG nova.network.neutron [req-f70477af-47cb-481b-8d49-936c324c03aa req-17a9141d-f3c9-43d8-8fba-e2a1e190eeb5 service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1030.600937] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1030.601741] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a687dc02-b3bd-4cdb-968b-8aa48279f32e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.615121] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661011, 'name': CloneVM_Task, 'duration_secs': 1.69572} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.616532] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Created linked-clone VM from snapshot [ 1030.616876] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1030.616876] env[65758]: value = "task-4661020" [ 1030.616876] env[65758]: _type = "Task" [ 1030.616876] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.617576] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ead269-6316-44fa-8484-7582597c0eb2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.629528] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Uploading image e470b327-db70-46ad-8e6a-423ba4f01ad7 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1030.636579] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661020, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.643134] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1030.643441] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-68a9f52b-8495-44d7-b342-267238669936 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.651211] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661019, 'name': Rename_Task, 'duration_secs': 0.25517} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.652220] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1030.652630] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-900a90bc-6b67-4e03-aa49-665e420eb473 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.659333] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1030.659333] env[65758]: value = "task-4661021" [ 1030.659333] env[65758]: _type = "Task" [ 1030.659333] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.665911] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1030.665911] env[65758]: value = "task-4661022" [ 1030.665911] env[65758]: _type = "Task" [ 1030.665911] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.674234] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661021, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.682561] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661022, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.708068] env[65758]: WARNING neutronclient.v2_0.client [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1030.708487] env[65758]: WARNING openstack [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1030.708953] env[65758]: WARNING openstack [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1030.935982] env[65758]: DEBUG nova.compute.manager [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1030.936327] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.937385] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735f4cc5-3ae2-42ad-814b-a982e3f15a5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.946472] env[65758]: WARNING neutronclient.v2_0.client [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1030.947358] env[65758]: WARNING openstack [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1030.947846] env[65758]: WARNING openstack [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1030.961878] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1030.962460] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a76db178-0b83-4196-98a3-641f87b1b544 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.970827] env[65758]: DEBUG oslo_vmware.api [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1030.970827] env[65758]: value = "task-4661023" [ 1030.970827] env[65758]: _type = "Task" [ 1030.970827] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.987944] env[65758]: DEBUG oslo_vmware.api [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661023, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.013512] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661018, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.036558] env[65758]: DEBUG nova.network.neutron [-] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1031.064950] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-344a5aa0-8872-41df-995b-09e400110157 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.068773] env[65758]: DEBUG nova.network.neutron [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", "ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1031.075040] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97dbf3c4-e1b6-4dd8-8a57-50eee63d93d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.082463] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816c8551-2fa4-445e-87ac-c11fdcc9f332 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.106393] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c21c2f5-d3c0-40d6-be26-5065626adb6e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.166640] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7923c63-3266-4f2d-a858-5ecc8c8e9a40 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.172117] env[65758]: DEBUG nova.compute.manager [req-f70477af-47cb-481b-8d49-936c324c03aa req-17a9141d-f3c9-43d8-8fba-e2a1e190eeb5 service nova] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Detach interface failed, port_id=a6572553-5e5a-4fb9-9384-1cfcd168710b, reason: Instance fe6f2a15-f42a-4f63-8dfa-175adadf5c02 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1031.182945] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661020, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.189657] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b7ca0c-ec02-4960-bcf8-2747f31548aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.194461] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661021, 'name': Destroy_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.198902] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661022, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.211318] env[65758]: DEBUG nova.compute.provider_tree [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.482229] env[65758]: DEBUG oslo_vmware.api [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661023, 'name': PowerOffVM_Task, 'duration_secs': 0.245458} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.482589] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.482757] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1031.483112] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87349b0f-2afe-48cc-8177-e0096da8d7e1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.511701] env[65758]: DEBUG oslo_vmware.api [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661018, 'name': PowerOnVM_Task, 'duration_secs': 1.345763} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.512034] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.515300] env[65758]: DEBUG nova.compute.manager [None req-3a2305da-ca65-4b11-b84e-e458b42d09b1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1031.516251] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c103ab5a-c68e-4d8f-90fc-e55a8d0f5b33 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.541599] env[65758]: INFO nova.compute.manager [-] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Took 1.28 seconds to deallocate network for instance. 
[ 1031.557462] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1031.558163] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1031.558163] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleting the datastore file [datastore1] b50b7e64-6f7f-4abc-a4b1-93408a723298 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1031.558299] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62b8c782-0d64-43b5-b8ca-77ad340acc8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.566731] env[65758]: DEBUG oslo_vmware.api [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for the task: (returnval){ [ 1031.566731] env[65758]: value = "task-4661025" [ 1031.566731] env[65758]: _type = "Task" [ 1031.566731] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.572560] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.577626] env[65758]: DEBUG oslo_vmware.api [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.660450] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661020, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.685069] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661021, 'name': Destroy_Task, 'duration_secs': 0.574525} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.687425] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Destroyed the VM [ 1031.687677] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1031.687972] env[65758]: DEBUG oslo_vmware.api [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661022, 'name': PowerOnVM_Task, 'duration_secs': 0.856968} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.688255] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-622f8a0f-aa77-4cb7-9c06-0e38d0e9297b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.689726] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.689947] env[65758]: INFO nova.compute.manager [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Took 8.48 seconds to spawn the instance on the hypervisor. [ 1031.690145] env[65758]: DEBUG nova.compute.manager [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1031.690884] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae7377a-b401-46a0-8dd3-b23a53d4aaa6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.701666] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1031.701666] env[65758]: value = "task-4661026" [ 1031.701666] env[65758]: _type = "Task" [ 1031.701666] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.711986] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661026, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.715118] env[65758]: DEBUG nova.scheduler.client.report [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.049971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.088166] env[65758]: DEBUG oslo_vmware.api [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Task: {'id': task-4661025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243945} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.088720] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1032.088907] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1032.089104] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1032.089279] env[65758]: INFO nova.compute.manager [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1032.089529] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1032.089750] env[65758]: DEBUG nova.compute.manager [-] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1032.089888] env[65758]: DEBUG nova.network.neutron [-] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1032.090246] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1032.090884] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1032.091260] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1032.144139] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1032.161532] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661020, 'name': CloneVM_Task} progress is 95%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.214525] env[65758]: INFO nova.compute.manager [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Took 21.62 seconds to build instance. [ 1032.223257] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.223880] env[65758]: DEBUG nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1032.227019] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661026, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.228054] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.195s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.228972] env[65758]: INFO nova.compute.claims [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.597490] env[65758]: DEBUG nova.compute.manager [req-0e9c186e-1947-455b-a262-585d4b89c481 req-115fc15e-7b90-4a88-8ce1-dc2f2c276689 service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Received event network-vif-deleted-4f1651f5-5511-4231-b401-c0eb4cb9f9eb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1032.597490] env[65758]: INFO nova.compute.manager [req-0e9c186e-1947-455b-a262-585d4b89c481 req-115fc15e-7b90-4a88-8ce1-dc2f2c276689 service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Neutron deleted interface 4f1651f5-5511-4231-b401-c0eb4cb9f9eb; detaching it from the instance and deleting it from the info cache [ 1032.597490] env[65758]: DEBUG nova.network.neutron [req-0e9c186e-1947-455b-a262-585d4b89c481 req-115fc15e-7b90-4a88-8ce1-dc2f2c276689 service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1032.663652] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661020, 'name': CloneVM_Task, 'duration_secs': 1.756206} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.664985] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Created linked-clone VM from snapshot [ 1032.665471] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9e6419-d19e-4765-9b5c-a2ea73d38448 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.674557] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Uploading image cc3bbcc2-d5ef-4952-91ea-33f654d86e47 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1032.720450] env[65758]: DEBUG oslo_concurrency.lockutils [None req-845c8ac8-80b8-4a0a-a9df-edde2e0a8bbc tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.137s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.720694] env[65758]: DEBUG oslo_vmware.api [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661026, 'name': RemoveSnapshot_Task, 'duration_secs': 0.842285} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.720973] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1032.733765] env[65758]: DEBUG nova.compute.utils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1032.737304] env[65758]: DEBUG nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1032.737644] env[65758]: DEBUG nova.network.neutron [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1032.737988] env[65758]: WARNING neutronclient.v2_0.client [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1032.738863] env[65758]: WARNING neutronclient.v2_0.client [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1032.739527] env[65758]: WARNING openstack [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1032.739884] env[65758]: WARNING openstack [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1032.800697] env[65758]: DEBUG nova.policy [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f2ed00d7f814d1f907ba5900c8f3025', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16188c7bd36d4b0eaffdc980b71ac727', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1032.803265] env[65758]: INFO nova.compute.manager [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Unrescuing [ 1032.803454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.803600] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquired lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.803859] env[65758]: DEBUG nova.network.neutron [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1032.916189] env[65758]: DEBUG nova.network.neutron [-] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1033.073093] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f21a89-038b-489f-bc34-e9b20b4e5405 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.082469] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a98655-beee-47c8-aa03-3c88950a6f96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.117287] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fcfaba6a-68f4-4b9a-9342-ad40e6b039e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.120365] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807f8c2d-fd37-4c4d-9e5d-871e5ec820c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.124591] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf2c3f9-b864-4943-99b2-f3378a7088de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.153307] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d53f32b-6bc5-4a28-af07-f6503b680f33 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.158403] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance 'e6159a35-f073-4931-b0b0-832a88680356' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1033.165044] env[65758]: DEBUG oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1033.165044] env[65758]: value = "vm-910023" [ 1033.165044] env[65758]: _type = "VirtualMachine" [ 1033.165044] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1033.167390] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c66280-ff79-4fac-a28b-fb8080e26286 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.179391] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2d56d648-f18b-4523-b58c-0e655c75c312 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.195450] env[65758]: DEBUG nova.compute.provider_tree [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.211695] env[65758]: DEBUG oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease: (returnval){ [ 1033.211695] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52161e8e-c9cc-ef6e-c12d-288d5525f8e9" [ 1033.211695] env[65758]: _type = "HttpNfcLease" [ 1033.211695] env[65758]: } obtained for exporting VM: (result){ [ 1033.211695] env[65758]: value = "vm-910023" [ 1033.211695] env[65758]: _type = "VirtualMachine" [ 1033.211695] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1033.212115] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the lease: (returnval){ [ 1033.212115] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52161e8e-c9cc-ef6e-c12d-288d5525f8e9" [ 1033.212115] env[65758]: _type = "HttpNfcLease" [ 1033.212115] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1033.216132] env[65758]: DEBUG nova.compute.manager [req-0e9c186e-1947-455b-a262-585d4b89c481 req-115fc15e-7b90-4a88-8ce1-dc2f2c276689 service nova] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Detach interface failed, port_id=4f1651f5-5511-4231-b401-c0eb4cb9f9eb, reason: Instance b50b7e64-6f7f-4abc-a4b1-93408a723298 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1033.216132] env[65758]: DEBUG nova.network.neutron [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Successfully created port: 40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1033.219335] env[65758]: DEBUG nova.scheduler.client.report [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.232678] env[65758]: WARNING nova.compute.manager [None req-92d97415-da86-41f5-ac0a-9a4bb7695822 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Image not found during snapshot: nova.exception.ImageNotFound: Image e470b327-db70-46ad-8e6a-423ba4f01ad7 could not be found. [ 1033.240270] env[65758]: DEBUG nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1033.244061] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1033.244061] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52161e8e-c9cc-ef6e-c12d-288d5525f8e9" [ 1033.244061] env[65758]: _type = "HttpNfcLease" [ 1033.244061] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1033.244620] env[65758]: DEBUG oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1033.244620] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52161e8e-c9cc-ef6e-c12d-288d5525f8e9" [ 1033.244620] env[65758]: _type = "HttpNfcLease" [ 1033.244620] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1033.245485] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af094e14-4e65-4135-98e3-c658e70b1ad1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.255432] env[65758]: DEBUG oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527b51ec-aa3f-7e35-de26-752634d2a0f3/disk-0.vmdk from lease info. 
{{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1033.255685] env[65758]: DEBUG oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527b51ec-aa3f-7e35-de26-752634d2a0f3/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1033.318601] env[65758]: WARNING neutronclient.v2_0.client [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1033.319385] env[65758]: WARNING openstack [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1033.319753] env[65758]: WARNING openstack [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1033.419346] env[65758]: INFO nova.compute.manager [-] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Took 1.33 seconds to deallocate network for instance. [ 1033.442261] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9085e213-dea4-4eb0-a58e-8dde92e4c4a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.586008] env[65758]: WARNING neutronclient.v2_0.client [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1033.586741] env[65758]: WARNING openstack [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1033.587099] env[65758]: WARNING openstack [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1033.635067] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.635681] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.636036] env[65758]: INFO nova.compute.manager [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Shelving [ 1033.686089] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.686298] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84b71c88-57a6-441c-842d-61bcd9a3c42e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.694553] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1033.694553] env[65758]: value = "task-4661028" [ 1033.694553] env[65758]: _type = "Task" [ 1033.694553] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.703805] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661028, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.726598] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.499s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.727145] env[65758]: DEBUG nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1033.730225] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "9ec1ff52-7fbd-4530-9377-caeff103360b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.730521] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "9ec1ff52-7fbd-4530-9377-caeff103360b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.730799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "9ec1ff52-7fbd-4530-9377-caeff103360b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.731074] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "9ec1ff52-7fbd-4530-9377-caeff103360b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.731298] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "9ec1ff52-7fbd-4530-9377-caeff103360b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.733411] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.684s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.733684] env[65758]: DEBUG nova.objects.instance [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lazy-loading 'resources' on Instance uuid fe6f2a15-f42a-4f63-8dfa-175adadf5c02 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.734950] env[65758]: INFO nova.compute.manager [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Terminating instance [ 1033.766745] env[65758]: DEBUG nova.network.neutron [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [{"id": "216bffab-4451-407d-b8dd-9e8687a90b81", "address": "fa:16:3e:74:3c:81", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216bffab-44", "ovs_interfaceid": "216bffab-4451-407d-b8dd-9e8687a90b81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1033.927057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.206403] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661028, 'name': PowerOffVM_Task, 'duration_secs': 0.289057} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.206817] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.207445] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance 'e6159a35-f073-4931-b0b0-832a88680356' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1034.235974] env[65758]: DEBUG nova.compute.utils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1034.237796] env[65758]: DEBUG nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1034.238092] env[65758]: DEBUG nova.network.neutron [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1034.238440] env[65758]: WARNING neutronclient.v2_0.client [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1034.238865] env[65758]: WARNING neutronclient.v2_0.client [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1034.239504] env[65758]: WARNING openstack [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1034.239904] env[65758]: WARNING openstack [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1034.251081] env[65758]: DEBUG nova.compute.manager [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1034.251332] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1034.252439] env[65758]: DEBUG nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1034.256389] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ef4e8d-d292-4d74-b574-9963dcc7922c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.267273] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.267745] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2e3a946-27e2-4746-adc7-c399ff8dcb7f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.271603] env[65758]: DEBUG oslo_concurrency.lockutils [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Releasing lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.271603] env[65758]: DEBUG nova.objects.instance [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lazy-loading 'flavor' on Instance uuid 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.283046] env[65758]: DEBUG oslo_vmware.api [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1034.283046] env[65758]: value = "task-4661029" [ 1034.283046] env[65758]: _type = "Task" [ 1034.283046] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.295618] env[65758]: DEBUG oslo_vmware.api [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661029, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.322989] env[65758]: DEBUG nova.policy [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3338c19613c041abb681fa6cc661652a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e114eef3998848699a9a086fee86db29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1034.554023] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef8a4cd-0018-4a7e-a168-86b769292d60 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.562802] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8130ebd2-c2ad-4d2f-a714-6c5caa690467 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.601541] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1f1f98-5b7d-458b-b993-9e7df9929ecb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.612094] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e40b11-552d-4ca4-9044-c3c5d22edbc7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.631162] env[65758]: DEBUG nova.compute.provider_tree [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.647678] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.648306] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc016e33-db46-4bcc-8522-af489371eb40 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.658271] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1034.658271] env[65758]: value = "task-4661030" [ 1034.658271] env[65758]: _type = "Task" [ 1034.658271] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.670186] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661030, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.715729] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1034.716092] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1034.716813] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1034.716997] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1034.717197] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1034.717422] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1034.718993] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1034.719114] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 
tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1034.719237] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1034.719445] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1034.719633] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1034.727344] env[65758]: DEBUG nova.network.neutron [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Successfully created port: f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1034.731155] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74d8374a-0d88-4c88-85d5-51c0ade195e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.750511] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1034.750511] env[65758]: value = "task-4661031" [ 1034.750511] env[65758]: _type = "Task" [ 1034.750511] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.751182] env[65758]: DEBUG nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1034.772389] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661031, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.780676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47677f7d-0f3b-4c77-90ec-ece9b3b383c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.819624] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.825363] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5fd39233-c2e0-421b-b406-e0c9cf800a30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.827726] env[65758]: DEBUG oslo_vmware.api [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661029, 'name': PowerOffVM_Task, 'duration_secs': 0.210026} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.828837] env[65758]: DEBUG nova.network.neutron [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Successfully updated port: 40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1034.830499] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.830713] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1034.834067] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-626e1937-31fb-4735-ae43-4f3a64da8172 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.842799] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1034.842799] env[65758]: value = "task-4661032" [ 1034.842799] env[65758]: _type = "Task" [ 1034.842799] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.855801] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661032, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.926489] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.927726] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.928089] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleting the datastore file [datastore1] 9ec1ff52-7fbd-4530-9377-caeff103360b {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.929775] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efbec223-284b-4881-889d-76786ee4eb44 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.941076] env[65758]: DEBUG oslo_vmware.api [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for the task: (returnval){ [ 1034.941076] env[65758]: value = "task-4661034" [ 1034.941076] env[65758]: _type = "Task" [ 1034.941076] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.952073] env[65758]: DEBUG oslo_vmware.api [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661034, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.983332] env[65758]: DEBUG nova.compute.manager [req-c25b65f7-7629-4705-98f9-48798c1e731e req-ddb45eb0-376f-450f-8264-cd6c01649a29 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Received event network-vif-plugged-40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1034.983706] env[65758]: DEBUG oslo_concurrency.lockutils [req-c25b65f7-7629-4705-98f9-48798c1e731e req-ddb45eb0-376f-450f-8264-cd6c01649a29 service nova] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.984068] env[65758]: DEBUG oslo_concurrency.lockutils [req-c25b65f7-7629-4705-98f9-48798c1e731e req-ddb45eb0-376f-450f-8264-cd6c01649a29 service nova] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.984405] env[65758]: DEBUG oslo_concurrency.lockutils [req-c25b65f7-7629-4705-98f9-48798c1e731e req-ddb45eb0-376f-450f-8264-cd6c01649a29 service nova] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.984666] env[65758]: DEBUG nova.compute.manager [req-c25b65f7-7629-4705-98f9-48798c1e731e req-ddb45eb0-376f-450f-8264-cd6c01649a29 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] No waiting events found dispatching network-vif-plugged-40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1034.984899] env[65758]: WARNING nova.compute.manager [req-c25b65f7-7629-4705-98f9-48798c1e731e req-ddb45eb0-376f-450f-8264-cd6c01649a29 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Received unexpected event network-vif-plugged-40ae9fbf-7f23-48e1-bd47-7de2b62ace7e for instance with vm_state building and task_state spawning. [ 1035.135963] env[65758]: DEBUG nova.scheduler.client.report [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.173071] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661030, 'name': PowerOffVM_Task, 'duration_secs': 0.298163} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.173380] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.174395] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b6a240-6806-43e2-82a0-d1123b3414d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.205946] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bee77a-5036-40ef-b619-d60aac332302 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.269641] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661031, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.338769] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.339304] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.339565] env[65758]: DEBUG nova.network.neutron [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1035.355755] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661032, 'name': PowerOffVM_Task, 'duration_secs': 0.375143} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.357075] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.362737] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfiguring VM instance instance-0000003b to detach disk 2002 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1035.363593] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-067537d8-51ce-4d5d-9779-0eb7af27cf4a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.386109] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1035.386109] env[65758]: value = "task-4661035" [ 1035.386109] env[65758]: _type = "Task" [ 1035.386109] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.395775] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661035, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.453735] env[65758]: DEBUG oslo_vmware.api [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Task: {'id': task-4661034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.394606} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.454073] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.454264] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1035.454435] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1035.454603] env[65758]: INFO nova.compute.manager [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1035.454890] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1035.455107] env[65758]: DEBUG nova.compute.manager [-] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1035.455204] env[65758]: DEBUG nova.network.neutron [-] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1035.455458] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1035.455997] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1035.456291] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1035.497417] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1035.642775] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.646034] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.719s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.646034] env[65758]: DEBUG nova.objects.instance [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lazy-loading 'resources' on Instance uuid b50b7e64-6f7f-4abc-a4b1-93408a723298 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.671924] env[65758]: INFO nova.scheduler.client.report [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Deleted allocations for instance fe6f2a15-f42a-4f63-8dfa-175adadf5c02 [ 1035.720228] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1035.720553] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-eada8d6f-b269-4e39-8faa-14c1497d08ec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.731857] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1035.731857] env[65758]: value = "task-4661036" [ 1035.731857] env[65758]: _type = "Task" [ 1035.731857] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.745790] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661036, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.763601] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661031, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.772865] env[65758]: DEBUG nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1035.844602] env[65758]: WARNING openstack [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1035.845302] env[65758]: WARNING openstack [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1035.897401] env[65758]: DEBUG nova.network.neutron [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1035.909849] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661035, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.089619] env[65758]: WARNING neutronclient.v2_0.client [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1036.090148] env[65758]: WARNING openstack [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1036.090561] env[65758]: WARNING openstack [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1036.183173] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c87264a3-9c8e-45a5-947a-ef0d2af0edf9 tempest-InstanceActionsTestJSON-333792801 tempest-InstanceActionsTestJSON-333792801-project-member] Lock "fe6f2a15-f42a-4f63-8dfa-175adadf5c02" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.588s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.213998] env[65758]: DEBUG nova.network.neutron [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updating instance_info_cache with network_info: [{"id": "40ae9fbf-7f23-48e1-bd47-7de2b62ace7e", "address": "fa:16:3e:c9:90:4c", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ae9fbf-7f", "ovs_interfaceid": "40ae9fbf-7f23-48e1-bd47-7de2b62ace7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1036.239154] env[65758]: DEBUG nova.network.neutron [-] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1036.249611] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661036, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.268041] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661031, 'name': ReconfigVM_Task, 'duration_secs': 1.254665} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.268041] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance 'e6159a35-f073-4931-b0b0-832a88680356' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1036.372207] env[65758]: DEBUG nova.compute.manager [req-295bbde4-5463-4e82-bf0d-03e276adced6 req-63964672-ba0c-44f4-aeaa-e8a521ce51e0 service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Received event network-vif-plugged-f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1036.372207] env[65758]: DEBUG oslo_concurrency.lockutils [req-295bbde4-5463-4e82-bf0d-03e276adced6 req-63964672-ba0c-44f4-aeaa-e8a521ce51e0 service nova] Acquiring lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.372492] env[65758]: DEBUG oslo_concurrency.lockutils [req-295bbde4-5463-4e82-bf0d-03e276adced6 req-63964672-ba0c-44f4-aeaa-e8a521ce51e0 service nova] Lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.372492] env[65758]: DEBUG oslo_concurrency.lockutils [req-295bbde4-5463-4e82-bf0d-03e276adced6 req-63964672-ba0c-44f4-aeaa-e8a521ce51e0 service nova] Lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.372610] env[65758]: DEBUG nova.compute.manager [req-295bbde4-5463-4e82-bf0d-03e276adced6 req-63964672-ba0c-44f4-aeaa-e8a521ce51e0 service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] No waiting events found dispatching network-vif-plugged-f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1036.372775] env[65758]: WARNING nova.compute.manager [req-295bbde4-5463-4e82-bf0d-03e276adced6 req-63964672-ba0c-44f4-aeaa-e8a521ce51e0 service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Received unexpected event network-vif-plugged-f30ab0a1-5ab3-4e16-a881-f850a8fd4399 for instance with vm_state building and task_state spawning. [ 1036.402862] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661035, 'name': ReconfigVM_Task, 'duration_secs': 0.534496} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.403062] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfigured VM instance instance-0000003b to detach disk 2002 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1036.403151] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1036.403399] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b46f7400-d20f-44a2-91bd-9c116052e6d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.413632] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1036.413632] env[65758]: value = "task-4661037" [ 1036.413632] env[65758]: _type = "Task" [ 1036.413632] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.428193] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661037, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.455737] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca6585c-e074-431f-8841-a1d278f2b6f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.466526] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7711c7d4-385f-4506-b4e7-a2027c6fcb66 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.508495] env[65758]: DEBUG nova.network.neutron [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Successfully updated port: f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1036.510508] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9da7c9c-c63a-4ac4-8705-7f76283bf5f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.525516] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1938627-bc2f-4ba0-85df-1d639af099ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.542235] env[65758]: DEBUG nova.compute.provider_tree [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.723380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Releasing lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.723906] env[65758]: DEBUG nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Instance network_info: |[{"id": "40ae9fbf-7f23-48e1-bd47-7de2b62ace7e", "address": "fa:16:3e:c9:90:4c", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap40ae9fbf-7f", "ovs_interfaceid": "40ae9fbf-7f23-48e1-bd47-7de2b62ace7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1036.746059] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661036, 'name': CreateSnapshot_Task, 'duration_secs': 0.968754} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.746059] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1036.746438] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccba4ab-84e1-48fe-a9d7-2270ba06b168 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.749426] env[65758]: INFO nova.compute.manager [-] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Took 1.29 seconds to deallocate network for instance. [ 1036.776448] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1036.776659] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1036.776759] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1036.776904] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1036.777052] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] 
Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1036.777208] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1036.777445] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1036.777605] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1036.777766] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1036.777975] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1036.778234] env[65758]: DEBUG nova.virt.hardware [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1036.784022] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Reconfiguring VM instance instance-00000026 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1036.784420] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6ac0a1a-2017-489e-bdbd-1096d7566a86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.806745] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1036.806745] env[65758]: value = "task-4661038" [ 1036.806745] env[65758]: _type = "Task" [ 1036.806745] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.819788] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661038, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.927932] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661037, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.017458] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.017645] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.017817] env[65758]: DEBUG nova.network.neutron [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1037.039249] env[65758]: DEBUG nova.compute.manager [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Received event network-changed-40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1037.039934] env[65758]: DEBUG nova.compute.manager [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Refreshing instance network info cache due to event network-changed-40ae9fbf-7f23-48e1-bd47-7de2b62ace7e. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1037.040163] env[65758]: DEBUG oslo_concurrency.lockutils [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] Acquiring lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.040343] env[65758]: DEBUG oslo_concurrency.lockutils [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] Acquired lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.040515] env[65758]: DEBUG nova.network.neutron [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Refreshing network info cache for port 40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1037.046904] env[65758]: DEBUG nova.scheduler.client.report [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1037.264019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.275595] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1037.276034] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e46aef66-3338-4850-8d88-b4d7dc0a2623 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.287973] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1037.287973] env[65758]: value = "task-4661039" [ 1037.287973] env[65758]: _type = "Task" [ 1037.287973] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.298918] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661039, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.321033] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661038, 'name': ReconfigVM_Task, 'duration_secs': 0.222024} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.321033] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Reconfigured VM instance instance-00000026 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1037.321033] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b071fc-f679-4a2d-a9cd-a3f527208568 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.347807] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] e6159a35-f073-4931-b0b0-832a88680356/e6159a35-f073-4931-b0b0-832a88680356.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.348186] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-daf4e6c5-e710-4a2a-9704-d5e2eb7d0436 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.370912] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1037.370912] env[65758]: value = "task-4661040" [ 1037.370912] env[65758]: _type = "Task" [ 1037.370912] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.383939] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661040, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.427922] env[65758]: DEBUG oslo_vmware.api [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661037, 'name': PowerOnVM_Task, 'duration_secs': 0.562193} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.427922] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.428178] env[65758]: DEBUG nova.compute.manager [None req-14fa5d32-820d-4d02-9286-0434e9637845 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1037.428995] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51f483c-181b-40f1-9dce-c8e76fc33610 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.520929] env[65758]: WARNING openstack [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1037.523684] env[65758]: WARNING openstack [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1037.543422] env[65758]: WARNING neutronclient.v2_0.client [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1037.544281] env[65758]: WARNING openstack [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1037.544649] env[65758]: WARNING openstack [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1037.553222] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.908s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.557090] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.293s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.557090] env[65758]: DEBUG nova.objects.instance [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lazy-loading 'resources' on Instance uuid 9ec1ff52-7fbd-4530-9377-caeff103360b {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.587523] env[65758]: INFO nova.scheduler.client.report [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Deleted allocations for instance b50b7e64-6f7f-4abc-a4b1-93408a723298 [ 1037.593959] env[65758]: DEBUG nova.network.neutron [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1037.799917] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661039, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.801269] env[65758]: WARNING neutronclient.v2_0.client [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1037.802013] env[65758]: WARNING openstack [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1037.802438] env[65758]: WARNING openstack [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1037.885338] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661040, 'name': ReconfigVM_Task, 'duration_secs': 0.372211} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.885510] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Reconfigured VM instance instance-00000026 to attach disk [datastore2] e6159a35-f073-4931-b0b0-832a88680356/e6159a35-f073-4931-b0b0-832a88680356.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.885841] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance 'e6159a35-f073-4931-b0b0-832a88680356' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1038.026444] env[65758]: DEBUG nova.network.neutron [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Updating instance_info_cache with network_info: [{"id": "f30ab0a1-5ab3-4e16-a881-f850a8fd4399", "address": "fa:16:3e:39:c5:f9", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30ab0a1-5a", 
"ovs_interfaceid": "f30ab0a1-5ab3-4e16-a881-f850a8fd4399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1038.064775] env[65758]: WARNING neutronclient.v2_0.client [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1038.065512] env[65758]: WARNING openstack [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1038.065953] env[65758]: WARNING openstack [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1038.106224] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b0040990-e149-491e-beba-5fb54ea19ec6 tempest-ServerDiskConfigTestJSON-1667569941 tempest-ServerDiskConfigTestJSON-1667569941-project-member] Lock "b50b7e64-6f7f-4abc-a4b1-93408a723298" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.677s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.161228] env[65758]: DEBUG nova.network.neutron [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updated VIF entry in instance network info cache for port 40ae9fbf-7f23-48e1-bd47-7de2b62ace7e. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1038.161330] env[65758]: DEBUG nova.network.neutron [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updating instance_info_cache with network_info: [{"id": "40ae9fbf-7f23-48e1-bd47-7de2b62ace7e", "address": "fa:16:3e:c9:90:4c", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ae9fbf-7f", "ovs_interfaceid": "40ae9fbf-7f23-48e1-bd47-7de2b62ace7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1038.303235] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661039, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.339230] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55035421-6060-4780-b191-bcdf78daf57e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.349763] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806f560d-871b-468f-8a88-d7f79d151798 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.402083] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62f53e4-7a3e-4002-8122-9eaa961caeae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.404640] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0a6659-94c8-4514-b71f-aa98142cff39 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.436229] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3005327c-a187-46ca-afd8-b04874709625 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.440997] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b78954e-c2d9-493f-9dc2-d758bd5842d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.444779] env[65758]: DEBUG nova.compute.manager [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Received event network-changed-f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1038.444965] env[65758]: DEBUG nova.compute.manager [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Refreshing instance network info cache due to event network-changed-f30ab0a1-5ab3-4e16-a881-f850a8fd4399. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1038.445170] env[65758]: DEBUG oslo_concurrency.lockutils [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] Acquiring lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.459540] env[65758]: DEBUG nova.compute.provider_tree [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.480086] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance 'e6159a35-f073-4931-b0b0-832a88680356' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1038.486536] env[65758]: DEBUG nova.scheduler.client.report [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.530335] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.530829] env[65758]: DEBUG nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Instance network_info: |[{"id": "f30ab0a1-5ab3-4e16-a881-f850a8fd4399", "address": "fa:16:3e:39:c5:f9", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapf30ab0a1-5a", "ovs_interfaceid": "f30ab0a1-5ab3-4e16-a881-f850a8fd4399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1038.531828] env[65758]: DEBUG oslo_concurrency.lockutils [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] Acquired lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.531938] env[65758]: DEBUG nova.network.neutron [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Refreshing network info cache for port f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1038.664680] env[65758]: DEBUG oslo_concurrency.lockutils [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] Releasing lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.664851] env[65758]: DEBUG nova.compute.manager [req-ba7df502-02d7-4a71-8505-47cdd10f0562 req-f2d7e55d-8730-4e73-b219-0c6cf923e0d0 service nova] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Received event network-vif-deleted-295a6d0b-82a8-470a-8be9-077f59f98374 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1038.801335] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661039, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.992575] env[65758]: WARNING neutronclient.v2_0.client [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1038.992944] env[65758]: WARNING neutronclient.v2_0.client [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1038.995925] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.440s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.033849] env[65758]: INFO nova.scheduler.client.report [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Deleted allocations for instance 9ec1ff52-7fbd-4530-9377-caeff103360b [ 1039.036030] env[65758]: WARNING neutronclient.v2_0.client [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1039.037104] env[65758]: WARNING openstack [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1039.038116] env[65758]: WARNING openstack [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1039.204103] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1039.204641] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.205101] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1039.205441] env[65758]: DEBUG nova.virt.hardware [None 
req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.205826] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1039.206156] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1039.207807] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1039.207807] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1039.207807] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1039.207807] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1039.207807] env[65758]: DEBUG nova.virt.hardware [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1039.211050] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe7c882-eb80-496a-9463-8c2a35fd6e05 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.224988] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1039.225284] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.225439] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1039.225638] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.225786] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1039.225926] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1039.226155] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1039.226329] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1039.226494] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1039.226648] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 
1039.226840] env[65758]: DEBUG nova.virt.hardware [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1039.228305] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c5d966-caaa-4cbf-bd49-a356d7a60459 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.234232] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79180e73-4c98-47d1-a0bf-e04487ae1ddb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.248885] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:90:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40ae9fbf-7f23-48e1-bd47-7de2b62ace7e', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.257061] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1039.259986] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212a21f-c0fc-c50e-edef-bc796d1ba8d5/disk-0.vmdk. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1039.263339] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.264408] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2bba90-e949-4299-98c3-9223b3ce33b8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.268493] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44d4c561-013f-48fb-8b73-e69d52007389 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.285614] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0374224-6d96-4a8e-b1e1-469e6a62c4a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.305183] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:c5:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f30ab0a1-5ab3-4e16-a881-f850a8fd4399', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.313203] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1039.313562] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212a21f-c0fc-c50e-edef-bc796d1ba8d5/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1039.313735] env[65758]: ERROR oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212a21f-c0fc-c50e-edef-bc796d1ba8d5/disk-0.vmdk due to incomplete transfer. 
[ 1039.316149] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.316491] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-536981cd-cb2e-4af0-a7d8-b19112e783bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.318521] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.318521] env[65758]: value = "task-4661041" [ 1039.318521] env[65758]: _type = "Task" [ 1039.318521] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.322121] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16cacb7e-add2-49c2-99e1-6b56698876c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.338495] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661039, 'name': CloneVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.345824] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5212a21f-c0fc-c50e-edef-bc796d1ba8d5/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1039.346055] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Uploaded image df203c52-cb8e-4277-903c-c114ae8627be to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1039.348813] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1039.354198] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5dc98f05-6c41-459d-8f06-9b305265eed0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.356171] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.356171] env[65758]: value = "task-4661042" [ 1039.356171] env[65758]: _type = "Task" [ 1039.356171] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.356430] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661041, 'name': CreateVM_Task} progress is 15%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.364091] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1039.364091] env[65758]: value = "task-4661043" [ 1039.364091] env[65758]: _type = "Task" [ 1039.364091] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.371459] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661042, 'name': CreateVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.385534] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661043, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.485062] env[65758]: DEBUG nova.network.neutron [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Port b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 1039.556350] env[65758]: DEBUG oslo_concurrency.lockutils [None req-866e8ace-0f05-4f7f-9c0a-2cfba05feef5 tempest-ImagesTestJSON-2085877570 tempest-ImagesTestJSON-2085877570-project-member] Lock "9ec1ff52-7fbd-4530-9377-caeff103360b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.826s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.793331] env[65758]: DEBUG nova.compute.manager [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Received event network-changed-216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1039.794206] env[65758]: DEBUG nova.compute.manager [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Refreshing instance network info cache due to event network-changed-216bffab-4451-407d-b8dd-9e8687a90b81. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1039.794206] env[65758]: DEBUG oslo_concurrency.lockutils [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] Acquiring lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.794206] env[65758]: DEBUG oslo_concurrency.lockutils [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] Acquired lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.794206] env[65758]: DEBUG nova.network.neutron [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Refreshing network info cache for port 216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1039.807053] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661039, 'name': CloneVM_Task, 'duration_secs': 2.088581} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.809649] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Created linked-clone VM from snapshot [ 1039.809649] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313d8a2c-2105-4b95-a299-f6a08db53320 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.820866] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Uploading image b02ef41d-27e9-450f-ae97-a90537c4af36 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1039.855199] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661041, 'name': CreateVM_Task, 'duration_secs': 0.433879} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.857695] env[65758]: DEBUG oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1039.857695] env[65758]: value = "vm-910025" [ 1039.857695] env[65758]: _type = "VirtualMachine" [ 1039.857695] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1039.857943] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.858241] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-be849ebd-86f2-4011-a684-a6400ab67b8a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.865495] env[65758]: WARNING neutronclient.v2_0.client [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1039.865850] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.866015] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.866474] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1039.871703] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-241726fc-8d15-4d50-a7b0-55f3039ae0c8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.886985] env[65758]: DEBUG oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lease: (returnval){ [ 1039.886985] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f0b7ef-09c3-0aab-d67e-382428d65f05" [ 1039.886985] env[65758]: _type = "HttpNfcLease" [ 1039.886985] env[65758]: } obtained for exporting VM: (result){ [ 1039.886985] env[65758]: value = "vm-910025" [ 1039.886985] env[65758]: _type = "VirtualMachine" [ 1039.886985] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1039.887360] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the lease: (returnval){ [ 1039.887360] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f0b7ef-09c3-0aab-d67e-382428d65f05" [ 1039.887360] env[65758]: _type = "HttpNfcLease" [ 1039.887360] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1039.887535] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661042, 'name': CreateVM_Task, 'duration_secs': 0.45197} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.889337] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.889701] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1039.889701] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526f9bbb-5276-62d2-8a94-13197c2ea685" [ 1039.889701] env[65758]: _type = "Task" [ 1039.889701] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.895945] env[65758]: WARNING neutronclient.v2_0.client [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1039.895945] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.902285] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661043, 'name': Destroy_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.911135] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526f9bbb-5276-62d2-8a94-13197c2ea685, 'name': SearchDatastore_Task, 'duration_secs': 0.016373} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.911472] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1039.911472] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f0b7ef-09c3-0aab-d67e-382428d65f05" [ 1039.911472] env[65758]: _type = "HttpNfcLease" [ 1039.911472] env[65758]: } is ready. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1039.911770] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.912368] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1039.912368] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.912491] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.912568] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.913428] env[65758]: DEBUG oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1039.913428] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f0b7ef-09c3-0aab-d67e-382428d65f05" [ 1039.913428] env[65758]: _type = "HttpNfcLease" [ 1039.913428] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1039.913428] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.913428] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1039.913668] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07b9ac21-2bef-44fb-a752-0ee390c0274d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.917061] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d23fcb1-b552-47b8-86e5-706db919229b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.920242] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da39102c-1907-4222-8010-20846f028464 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.927578] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1039.927578] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5204be9b-8217-2f22-a4d6-50f15c0b61a9" [ 1039.927578] env[65758]: _type = "Task" [ 1039.927578] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.931463] env[65758]: DEBUG oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab67bd-6a89-314d-dfed-d3d5d6b8e517/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1039.931527] env[65758]: DEBUG oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab67bd-6a89-314d-dfed-d3d5d6b8e517/disk-0.vmdk for reading. 
{{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1039.937455] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.937455] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1039.941024] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f07ab6e-835d-4415-82e1-b6bfd8c4c868 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.014376] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5204be9b-8217-2f22-a4d6-50f15c0b61a9, 'name': SearchDatastore_Task, 'duration_secs': 0.02066} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.015303] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.015653] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.015894] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.018542] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1040.018542] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a36840-032c-e0e7-5621-5371246109c2" [ 1040.018542] env[65758]: _type = "Task" [ 1040.018542] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.032645] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a36840-032c-e0e7-5621-5371246109c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.059797] env[65758]: WARNING neutronclient.v2_0.client [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1040.059797] env[65758]: WARNING openstack [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1040.060168] env[65758]: WARNING openstack [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1040.083266] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b5a977b1-df98-4dce-ab14-f60d0e4d7a59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.159384] env[65758]: DEBUG nova.network.neutron [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Updated VIF entry in instance network info cache for port f30ab0a1-5ab3-4e16-a881-f850a8fd4399. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1040.159787] env[65758]: DEBUG nova.network.neutron [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Updating instance_info_cache with network_info: [{"id": "f30ab0a1-5ab3-4e16-a881-f850a8fd4399", "address": "fa:16:3e:39:c5:f9", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30ab0a1-5a", "ovs_interfaceid": "f30ab0a1-5ab3-4e16-a881-f850a8fd4399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1040.301097] env[65758]: WARNING neutronclient.v2_0.client [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1040.301877] env[65758]: WARNING openstack [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1040.302173] env[65758]: WARNING openstack [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1040.386757] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661043, 'name': Destroy_Task, 'duration_secs': 0.597736} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.387153] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Destroyed the VM [ 1040.387483] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1040.387880] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c42ab4e7-f5f8-45a7-b7e2-99a631abb3ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.399291] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1040.399291] env[65758]: value = "task-4661045" [ 1040.399291] env[65758]: _type = "Task" [ 1040.399291] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.409248] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661045, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.525579] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "e6159a35-f073-4931-b0b0-832a88680356-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.525579] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.003s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.525862] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.544992] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a36840-032c-e0e7-5621-5371246109c2, 'name': 
SearchDatastore_Task, 'duration_secs': 0.020424} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.548822] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd73a61d-0cb4-4932-9612-9e564a3b392f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.557876] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1040.557876] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]526ff455-7626-e59b-b5db-bcc1ce7b730e" [ 1040.557876] env[65758]: _type = "Task" [ 1040.557876] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.577375] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526ff455-7626-e59b-b5db-bcc1ce7b730e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.662918] env[65758]: DEBUG oslo_concurrency.lockutils [req-3a595913-2195-43fe-88c1-f1caeae33492 req-b8602490-a92a-4c52-a35b-f177be00f18c service nova] Releasing lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.910984] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661045, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.074460] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526ff455-7626-e59b-b5db-bcc1ce7b730e, 'name': SearchDatastore_Task, 'duration_secs': 0.019969} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.074986] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.075535] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] ade1d760-e3e7-49c8-ba9d-b4829ca60841/ade1d760-e3e7-49c8-ba9d-b4829ca60841.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.075669] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.075990] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.076446] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c34224f7-c599-49cf-b715-c8dea7b6fced {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.079036] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d76293ac-90e9-4a4d-815a-7b70e3cf0414 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.088367] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1041.088367] env[65758]: value = "task-4661046" [ 1041.088367] env[65758]: _type = "Task" [ 1041.088367] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.094166] env[65758]: WARNING neutronclient.v2_0.client [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
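The entries above repeatedly serialize work on the datastore image cache: a lock named after "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk" is acquired before the cached VMDK is checked or copied and released afterwards (the lockutils.py:313/316/334 lines). Below is a minimal illustrative sketch of that pattern using oslo.concurrency; the function name, the lock body, and download_image() are hypothetical stand-ins, not Nova's actual code.

from oslo_concurrency import lockutils

# Lock name mirroring the datastore cache path seen in the log entries above.
CACHE_LOCK = ("[datastore2] devstack-image-cache_base/"
              "75a6399b-5100-4c51-b5cf-162bd505a28f/"
              "75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk")

def fetch_image_if_missing(download_image):
    # lockutils.lock() is a context manager: it logs the "Acquiring lock" /
    # "Acquired lock" lines on entry and "Releasing lock" on exit, matching
    # the lockutils.py:313/316/334 entries in this trace.
    with lockutils.lock(CACHE_LOCK):
        # Only one worker per process touches this cached VMDK at a time;
        # the body here is a placeholder for the real fetch/copy logic.
        download_image()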
[ 1041.095098] env[65758]: WARNING openstack [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1041.095670] env[65758]: WARNING openstack [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1041.108922] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.109427] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1041.111875] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1144b9b3-bdfc-4c4a-9501-1a2dcf62b40d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.121586] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.126574] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1041.126574] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522a48be-6c77-3e08-0366-2c9090f68a8a" [ 1041.126574] env[65758]: _type = "Task" [ 1041.126574] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.140088] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522a48be-6c77-3e08-0366-2c9090f68a8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.415768] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661045, 'name': RemoveSnapshot_Task, 'duration_secs': 0.770053} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.415768] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1041.415997] env[65758]: DEBUG nova.compute.manager [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1041.416889] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a55c25-f25c-4d50-8d0d-837425c2849a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.443151] env[65758]: DEBUG nova.network.neutron [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updated VIF entry in instance network info cache for port 216bffab-4451-407d-b8dd-9e8687a90b81. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1041.443575] env[65758]: DEBUG nova.network.neutron [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [{"id": "216bffab-4451-407d-b8dd-9e8687a90b81", "address": "fa:16:3e:74:3c:81", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216bffab-44", "ovs_interfaceid": "216bffab-4451-407d-b8dd-9e8687a90b81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1041.535618] env[65758]: WARNING neutronclient.v2_0.client [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1041.588655] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.588655] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.588655] env[65758]: DEBUG nova.network.neutron [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1041.603815] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661046, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.640570] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522a48be-6c77-3e08-0366-2c9090f68a8a, 'name': SearchDatastore_Task, 'duration_secs': 0.026749} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.641986] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90811667-8984-4f60-8b30-2b97f78772a8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.649641] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1041.649641] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52772aef-a4ff-2780-dfb8-b0f733ffac66" [ 1041.649641] env[65758]: _type = "Task" [ 1041.649641] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.662596] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52772aef-a4ff-2780-dfb8-b0f733ffac66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.835135] env[65758]: DEBUG nova.compute.manager [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Received event network-changed-216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1041.835135] env[65758]: DEBUG nova.compute.manager [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Refreshing instance network info cache due to event network-changed-216bffab-4451-407d-b8dd-9e8687a90b81. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1041.835261] env[65758]: DEBUG oslo_concurrency.lockutils [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] Acquiring lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.932134] env[65758]: INFO nova.compute.manager [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Shelve offloading [ 1041.945947] env[65758]: DEBUG oslo_concurrency.lockutils [req-6b406979-06ab-45da-8722-46152fede810 req-2b300ffd-b70d-4eeb-8c7d-7806c13a352e service nova] Releasing lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.946512] env[65758]: DEBUG oslo_concurrency.lockutils [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] Acquired lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.946727] env[65758]: DEBUG nova.network.neutron [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Refreshing network info cache for port 216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1042.097978] env[65758]: WARNING neutronclient.v2_0.client [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1042.098658] env[65758]: WARNING openstack [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1042.099102] env[65758]: WARNING openstack [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1042.112411] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661046, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686683} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.112580] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] ade1d760-e3e7-49c8-ba9d-b4829ca60841/ade1d760-e3e7-49c8-ba9d-b4829ca60841.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.112792] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.113149] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-093a9728-4862-4420-9ea6-550f9e7020d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.124196] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1042.124196] env[65758]: value = "task-4661047" [ 1042.124196] env[65758]: _type = "Task" [ 1042.124196] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.135461] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661047, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.163418] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52772aef-a4ff-2780-dfb8-b0f733ffac66, 'name': SearchDatastore_Task, 'duration_secs': 0.053468} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.164020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.164020] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 31b7d1ee-58c1-47f3-a862-0bc5cb17addc/31b7d1ee-58c1-47f3-a862-0bc5cb17addc.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1042.164312] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec6b943a-e544-412f-a01e-8cd3d7fc84c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.174177] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1042.174177] env[65758]: value = "task-4661048" [ 1042.174177] env[65758]: _type = "Task" [ 1042.174177] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.184567] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.345711] env[65758]: WARNING neutronclient.v2_0.client [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
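Each "Waiting for the task: ... to complete" / "Task: {...} completed successfully" pair above is oslo.vmware polling a vCenter task on the compute node's behalf. The sketch below shows that shape for the SearchDatastore_Task calls visible in the log; `session` is assumed to be an already-established oslo_vmware.api.VMwareAPISession and `ds_browser` a HostDatastoreBrowser managed-object reference, both hypothetical names here rather than code taken from Nova.

def search_datastore(session, ds_browser, ds_path):
    # Start a SearchDatastore_Task on the datastore browser (the same
    # "Invoking HostDatastoreBrowser.SearchDatastore_Task" call in the log).
    task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                              ds_browser, datastorePath=ds_path)
    # wait_for_task() blocks, polling the task state (the _poll_task lines
    # above, including the "progress is N%" updates), and returns the task
    # info on success or raises if the task ends in an error state.
    task_info = session.wait_for_task(task)
    return task_info.result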
[ 1042.346389] env[65758]: WARNING openstack [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1042.346649] env[65758]: WARNING openstack [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1042.437124] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.437529] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b82cadf9-49fe-4719-80a0-ebd20f8509bc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.451863] env[65758]: WARNING neutronclient.v2_0.client [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1042.452562] env[65758]: WARNING openstack [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1042.452946] env[65758]: WARNING openstack [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1042.462613] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1042.462613] env[65758]: value = "task-4661049" [ 1042.462613] env[65758]: _type = "Task" [ 1042.462613] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.476138] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1042.476423] env[65758]: DEBUG nova.compute.manager [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1042.477271] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a186d9c7-1726-42e0-83bb-a251e43c08e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.485240] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.485440] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.485626] env[65758]: DEBUG nova.network.neutron [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1042.551315] env[65758]: DEBUG nova.network.neutron [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", 
"ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1042.637244] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08315} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.637564] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1042.638582] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f87a5a3-57bb-483f-9d8c-39c335d315c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.664486] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] ade1d760-e3e7-49c8-ba9d-b4829ca60841/ade1d760-e3e7-49c8-ba9d-b4829ca60841.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1042.669899] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebc184e6-8a72-40a4-911f-fe13d29cea32 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.695990] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661048, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.696893] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1042.696893] env[65758]: value = "task-4661050" [ 1042.696893] env[65758]: _type = "Task" [ 1042.696893] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.877857] env[65758]: WARNING neutronclient.v2_0.client [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1042.878831] env[65758]: WARNING openstack [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1042.879249] env[65758]: WARNING openstack [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1042.990172] env[65758]: WARNING neutronclient.v2_0.client [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1042.990943] env[65758]: WARNING openstack [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1042.991358] env[65758]: WARNING openstack [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1043.016051] env[65758]: DEBUG nova.network.neutron [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updated VIF entry in instance network info cache for port 216bffab-4451-407d-b8dd-9e8687a90b81. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1043.016503] env[65758]: DEBUG nova.network.neutron [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [{"id": "216bffab-4451-407d-b8dd-9e8687a90b81", "address": "fa:16:3e:74:3c:81", "network": {"id": "461d3e7f-ff10-460c-b848-08f86023c005", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-680783541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "693b129cd84f4eee9971e7221e92c3e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216bffab-44", "ovs_interfaceid": "216bffab-4451-407d-b8dd-9e8687a90b81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1043.056869] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.202141] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719547} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.204184] env[65758]: WARNING neutronclient.v2_0.client [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
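The tail of each spawn traced above follows the same sequence: copy the cached image VMDK into the instance directory (CopyVirtualDisk_Task), extend the root disk to the flavor size (ExtendVirtualDisk_Task, 1048576 KB, i.e. 1 GiB, in these runs), reconfigure the VM to attach the disk, rename, and power it on. As a hedged sketch of just the extend step, assuming the same kind of `session` object and a `datacenter` managed-object reference (both hypothetical names, not taken from the log):

def extend_root_disk(session, vmdk_path, datacenter, new_capacity_kb=1048576):
    # The virtual disk manager comes from the vCenter service content
    # retrieved when the session was created.
    disk_manager = session.vim.service_content.virtualDiskManager
    # ExtendVirtualDisk_Task is the vSphere call shown in the log entries;
    # newCapacityKb is the target size in KB (1048576 KB == 1 GiB).
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                              disk_manager,
                              name=vmdk_path,
                              datacenter=datacenter,
                              newCapacityKb=new_capacity_kb,
                              eagerZero=False)
    # Block until vCenter reports the task finished, as in the log above.
    session.wait_for_task(task)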
[ 1043.204908] env[65758]: WARNING openstack [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1043.207670] env[65758]: WARNING openstack [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1043.216054] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 31b7d1ee-58c1-47f3-a862-0bc5cb17addc/31b7d1ee-58c1-47f3-a862-0bc5cb17addc.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.216288] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.216868] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b99c32b0-04a0-4df8-a18d-d691fa8d933c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.226958] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661050, 'name': ReconfigVM_Task, 'duration_secs': 0.38243} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.227578] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfigured VM instance instance-0000005f to attach disk [datastore2] ade1d760-e3e7-49c8-ba9d-b4829ca60841/ade1d760-e3e7-49c8-ba9d-b4829ca60841.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.228385] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1043.228385] env[65758]: value = "task-4661051" [ 1043.228385] env[65758]: _type = "Task" [ 1043.228385] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.228602] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8146856-0ab5-4810-855b-2ee7a4265715 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.242139] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661051, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.243795] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1043.243795] env[65758]: value = "task-4661052" [ 1043.243795] env[65758]: _type = "Task" [ 1043.243795] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.253509] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661052, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.359577] env[65758]: DEBUG nova.network.neutron [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [{"id": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "address": "fa:16:3e:01:98:57", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c394c9-9b", "ovs_interfaceid": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1043.520590] env[65758]: DEBUG oslo_concurrency.lockutils [req-fd2bf06e-bee0-4c11-b00a-df32d303a52f req-fb88ad4c-366e-48e3-90be-8f06bf40aaf4 service nova] Releasing lock "refresh_cache-4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.587568] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88eb4570-213d-4dc8-b5d8-8b03d6363de1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.609141] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8e99c4-547e-4196-a4f6-67cc750492fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.617638] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance 'e6159a35-f073-4931-b0b0-832a88680356' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1043.741518] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085143} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.741749] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.742767] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6747c2-c8c7-4978-bfe5-395f13d5ab15 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.755608] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661052, 'name': Rename_Task, 'duration_secs': 0.172188} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.765194] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1043.774819] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 31b7d1ee-58c1-47f3-a862-0bc5cb17addc/31b7d1ee-58c1-47f3-a862-0bc5cb17addc.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.775233] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95effc5f-f8a9-455a-8af3-c216271c8af3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.777579] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97d58388-188f-4cdb-ae42-d0d00170513d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.799829] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1043.799829] env[65758]: value = "task-4661054" [ 1043.799829] env[65758]: _type = "Task" [ 1043.799829] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.800187] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1043.800187] env[65758]: value = "task-4661053" [ 1043.800187] env[65758]: _type = "Task" [ 1043.800187] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.816360] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661053, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.820164] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661054, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.863457] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.864510] env[65758]: WARNING neutronclient.v2_0.client [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1043.866415] env[65758]: WARNING openstack [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1043.869258] env[65758]: WARNING openstack [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1043.876516] env[65758]: WARNING neutronclient.v2_0.client [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1044.077602] env[65758]: DEBUG oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527b51ec-aa3f-7e35-de26-752634d2a0f3/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1044.078602] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224d9198-a038-440b-a1b0-f5f8500ff90f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.093893] env[65758]: DEBUG oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527b51ec-aa3f-7e35-de26-752634d2a0f3/disk-0.vmdk is in state: ready. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1044.093893] env[65758]: ERROR oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527b51ec-aa3f-7e35-de26-752634d2a0f3/disk-0.vmdk due to incomplete transfer. [ 1044.093893] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1091da04-3442-40c5-8651-acfa70f77c46 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.102085] env[65758]: DEBUG oslo_vmware.rw_handles [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527b51ec-aa3f-7e35-de26-752634d2a0f3/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1044.102602] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Uploaded image cc3bbcc2-d5ef-4952-91ea-33f654d86e47 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1044.104682] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1044.105515] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-81cdf4b9-80bd-4ed1-83f5-8be4347abe53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.116023] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1044.116023] env[65758]: value = "task-4661055" [ 1044.116023] env[65758]: _type = "Task" [ 1044.116023] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.127223] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.127929] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661055, 'name': Destroy_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.128209] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34e90a8d-3bd2-4c67-ad62-33b76e522d61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.137142] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1044.137142] env[65758]: value = "task-4661056" [ 1044.137142] env[65758]: _type = "Task" [ 1044.137142] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.149940] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661056, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.321157] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661053, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.323081] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.330610] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aea1ff1-a7a7-4512-a524-b6efbd9184ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.334301] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661054, 'name': ReconfigVM_Task, 'duration_secs': 0.386579} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.334602] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 31b7d1ee-58c1-47f3-a862-0bc5cb17addc/31b7d1ee-58c1-47f3-a862-0bc5cb17addc.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.335941] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3505324e-60e7-4de9-a0c9-413d9490d437 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.341055] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.341345] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-feb4dc3c-8f72-42e7-a4f8-6b125ba99ec6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.344809] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1044.344809] env[65758]: value = "task-4661057" [ 1044.344809] env[65758]: _type = "Task" [ 1044.344809] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.358616] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661057, 'name': Rename_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.434701] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.435195] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.435404] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleting the datastore file [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.435790] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69a40ea4-61d7-46cf-b68e-5919ab28d341 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.444489] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1044.444489] env[65758]: value = "task-4661059" [ 1044.444489] env[65758]: _type = "Task" [ 1044.444489] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.454961] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661059, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.498693] env[65758]: DEBUG nova.compute.manager [req-576e2bda-2044-4b9e-a204-1bd24010dbb2 req-933e9a23-467b-4c66-8f13-5cb353f67772 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received event network-vif-unplugged-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1044.498953] env[65758]: DEBUG oslo_concurrency.lockutils [req-576e2bda-2044-4b9e-a204-1bd24010dbb2 req-933e9a23-467b-4c66-8f13-5cb353f67772 service nova] Acquiring lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.499280] env[65758]: DEBUG oslo_concurrency.lockutils [req-576e2bda-2044-4b9e-a204-1bd24010dbb2 req-933e9a23-467b-4c66-8f13-5cb353f67772 service nova] Lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.500109] env[65758]: DEBUG oslo_concurrency.lockutils [req-576e2bda-2044-4b9e-a204-1bd24010dbb2 req-933e9a23-467b-4c66-8f13-5cb353f67772 service nova] Lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.500109] env[65758]: DEBUG nova.compute.manager [req-576e2bda-2044-4b9e-a204-1bd24010dbb2 req-933e9a23-467b-4c66-8f13-5cb353f67772 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] No waiting events found dispatching network-vif-unplugged-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1044.500109] env[65758]: WARNING nova.compute.manager [req-576e2bda-2044-4b9e-a204-1bd24010dbb2 req-933e9a23-467b-4c66-8f13-5cb353f67772 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received unexpected event network-vif-unplugged-83c394c9-9b0d-40ad-923c-00e70d63c85a for instance with vm_state shelved and task_state shelving_offloading. 
[ 1044.587473] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.587808] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.588147] env[65758]: INFO nova.compute.manager [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Shelving [ 1044.629022] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661055, 'name': Destroy_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.651790] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661056, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.812219] env[65758]: DEBUG oslo_vmware.api [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661053, 'name': PowerOnVM_Task, 'duration_secs': 0.584652} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.812219] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1044.812219] env[65758]: INFO nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Took 10.56 seconds to spawn the instance on the hypervisor. 
[ 1044.812503] env[65758]: DEBUG nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1044.813216] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2574f110-cb0e-435e-b283-8924792fc3a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.858656] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661057, 'name': Rename_Task, 'duration_secs': 0.255639} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.859492] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.859492] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90aaeff2-8882-4a5a-8330-7ce81962a030 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.867897] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1044.867897] env[65758]: value = "task-4661060" [ 1044.867897] env[65758]: _type = "Task" [ 1044.867897] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.879552] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661060, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.959266] env[65758]: DEBUG oslo_vmware.api [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661059, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354319} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.959934] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.960350] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1044.960709] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1044.992147] env[65758]: INFO nova.scheduler.client.report [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleted allocations for instance 63b744d2-541a-42e3-9717-b06a4459fd50 [ 1045.128366] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661055, 'name': Destroy_Task, 'duration_secs': 0.761863} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.128652] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Destroyed the VM [ 1045.128913] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1045.129195] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-209e8934-8f5d-4e2e-9fd7-296fc7d38c59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.136955] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1045.136955] env[65758]: value = "task-4661061" [ 1045.136955] env[65758]: _type = "Task" [ 1045.136955] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.151172] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661061, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.155100] env[65758]: DEBUG oslo_vmware.api [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661056, 'name': PowerOnVM_Task, 'duration_secs': 0.519666} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.155402] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.155607] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8607038d-c73e-4173-a063-148bd428001b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance 'e6159a35-f073-4931-b0b0-832a88680356' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1045.337573] env[65758]: INFO nova.compute.manager [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Took 15.94 seconds to build instance. [ 1045.380577] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661060, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.497906] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.498751] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.498751] env[65758]: DEBUG nova.objects.instance [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lazy-loading 'resources' on Instance uuid 63b744d2-541a-42e3-9717-b06a4459fd50 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.600305] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.600305] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53283cc3-54a2-48ed-bd7f-2914308d2108 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.610239] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1045.610239] env[65758]: value = "task-4661062" [ 1045.610239] env[65758]: _type = "Task" [ 1045.610239] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.623200] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661062, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.652402] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661061, 'name': RemoveSnapshot_Task} progress is 17%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.843052] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e4b4d51-cb57-443b-9075-fcb29b75e3d2 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.458s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.884761] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661060, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.002710] env[65758]: DEBUG nova.objects.instance [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lazy-loading 'numa_topology' on Instance uuid 63b744d2-541a-42e3-9717-b06a4459fd50 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.127084] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661062, 'name': PowerOffVM_Task, 'duration_secs': 0.258138} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.127084] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1046.127084] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e38f72-f956-490f-880c-5f9b34d2e706 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.149126] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013bb35a-7b7c-4edb-874c-801262fd49eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.163533] env[65758]: DEBUG oslo_vmware.api [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661061, 'name': RemoveSnapshot_Task, 'duration_secs': 0.891285} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.163809] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1046.164127] env[65758]: INFO nova.compute.manager [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Took 17.72 seconds to snapshot the instance on the hypervisor. [ 1046.295820] env[65758]: DEBUG nova.compute.manager [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Received event network-changed-40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1046.296063] env[65758]: DEBUG nova.compute.manager [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Refreshing instance network info cache due to event network-changed-40ae9fbf-7f23-48e1-bd47-7de2b62ace7e. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1046.296326] env[65758]: DEBUG oslo_concurrency.lockutils [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] Acquiring lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.296433] env[65758]: DEBUG oslo_concurrency.lockutils [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] Acquired lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.297024] env[65758]: DEBUG nova.network.neutron [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Refreshing network info cache for port 40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1046.381972] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661060, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.509812] env[65758]: DEBUG nova.objects.base [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Object Instance<63b744d2-541a-42e3-9717-b06a4459fd50> lazy-loaded attributes: resources,numa_topology {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1046.555148] env[65758]: DEBUG nova.compute.manager [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received event network-changed-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1046.555148] env[65758]: DEBUG nova.compute.manager [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Refreshing instance network info cache due to event network-changed-83c394c9-9b0d-40ad-923c-00e70d63c85a. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1046.555148] env[65758]: DEBUG oslo_concurrency.lockutils [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] Acquiring lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.555148] env[65758]: DEBUG oslo_concurrency.lockutils [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] Acquired lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.555581] env[65758]: DEBUG nova.network.neutron [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Refreshing network info cache for port 83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1046.643737] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquiring lock "a014debf-2f16-4b30-af78-27a6751060de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.644196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "a014debf-2f16-4b30-af78-27a6751060de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.666515] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1046.668947] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-86913ea2-3b44-4310-b656-410f2b568568 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.694856] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1046.694856] env[65758]: value = "task-4661063" [ 1046.694856] env[65758]: _type = "Task" [ 1046.694856] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.709611] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661063, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.750704] env[65758]: DEBUG nova.compute.manager [None req-688fdaec-752a-4839-8ab4-29f8476a92f5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Found 2 images (rotation: 2) {{(pid=65758) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5057}} [ 1046.800035] env[65758]: WARNING neutronclient.v2_0.client [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1046.801783] env[65758]: WARNING openstack [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1046.802309] env[65758]: WARNING openstack [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1046.839534] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b5d587-fed0-406e-b906-868ff8cb0f51 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.852119] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78c78e8-c382-4f65-9778-67666aa48b8d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.898913] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365e745a-4b9b-4a29-8400-ae998baaa078 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.911163] env[65758]: DEBUG oslo_vmware.api [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661060, 'name': PowerOnVM_Task, 'duration_secs': 1.797668} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.912887] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1046.913922] env[65758]: INFO nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Took 11.14 seconds to spawn the instance on the hypervisor. 
[ 1046.914385] env[65758]: DEBUG nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1046.915762] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af519a9e-90eb-4c6f-8ff8-5528bb11946f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.921180] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6389a89-8c1b-43ea-9ac9-4638e229cef2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.947500] env[65758]: DEBUG nova.compute.provider_tree [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.058152] env[65758]: WARNING neutronclient.v2_0.client [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1047.059012] env[65758]: WARNING openstack [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1047.059424] env[65758]: WARNING openstack [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1047.139717] env[65758]: WARNING neutronclient.v2_0.client [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1047.140626] env[65758]: WARNING openstack [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1047.140984] env[65758]: WARNING openstack [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1047.150184] env[65758]: DEBUG nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1047.160442] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquiring lock "37bae4b3-6959-4f44-8600-26a4f859103c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.164649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "37bae4b3-6959-4f44-8600-26a4f859103c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.164649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquiring lock "37bae4b3-6959-4f44-8600-26a4f859103c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.164649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "37bae4b3-6959-4f44-8600-26a4f859103c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.164649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "37bae4b3-6959-4f44-8600-26a4f859103c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.164649] env[65758]: INFO nova.compute.manager [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Terminating instance [ 1047.205350] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661063, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.220404] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.220633] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.221080] env[65758]: DEBUG nova.objects.instance [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'flavor' on Instance uuid 5fc4f1b8-9024-4155-b56d-56a8d08f0259 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.293379] env[65758]: DEBUG nova.network.neutron [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updated VIF entry in instance network info cache for port 40ae9fbf-7f23-48e1-bd47-7de2b62ace7e. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1047.293783] env[65758]: DEBUG nova.network.neutron [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updating instance_info_cache with network_info: [{"id": "40ae9fbf-7f23-48e1-bd47-7de2b62ace7e", "address": "fa:16:3e:c9:90:4c", "network": {"id": "271db233-74ed-49c4-a8b0-3f5504e71055", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-663871086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16188c7bd36d4b0eaffdc980b71ac727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ae9fbf-7f", "ovs_interfaceid": "40ae9fbf-7f23-48e1-bd47-7de2b62ace7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1047.328462] env[65758]: WARNING neutronclient.v2_0.client [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1047.330033] env[65758]: WARNING openstack [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1047.330323] env[65758]: WARNING openstack [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1047.441363] env[65758]: DEBUG nova.network.neutron [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updated VIF entry in instance network info cache for port 83c394c9-9b0d-40ad-923c-00e70d63c85a. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1047.441733] env[65758]: DEBUG nova.network.neutron [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [{"id": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "address": "fa:16:3e:01:98:57", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": null, "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap83c394c9-9b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1047.452990] env[65758]: DEBUG nova.scheduler.client.report [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.459497] env[65758]: INFO nova.compute.manager [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Took 17.45 seconds to build instance. [ 1047.669492] env[65758]: DEBUG nova.compute.manager [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1047.670033] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1047.672532] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45924b45-3bde-47d5-bb74-d6d84f257aa4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.680529] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.687931] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.689058] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4f1f0cf-52be-4a86-9733-8fe282bd8f9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.699026] env[65758]: DEBUG oslo_vmware.api [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1047.699026] env[65758]: value = "task-4661064" [ 1047.699026] env[65758]: _type = "Task" [ 1047.699026] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.707724] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661063, 'name': CreateSnapshot_Task, 'duration_secs': 0.648321} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.708142] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1047.709342] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2090c64e-4cae-403c-b0b1-b0983c685c98 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.717501] env[65758]: DEBUG oslo_vmware.api [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4661064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.727867] env[65758]: WARNING neutronclient.v2_0.client [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1047.728458] env[65758]: WARNING openstack [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1047.728974] env[65758]: WARNING openstack [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1047.735257] env[65758]: DEBUG nova.objects.instance [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'pci_requests' on Instance uuid 5fc4f1b8-9024-4155-b56d-56a8d08f0259 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.797943] env[65758]: DEBUG oslo_concurrency.lockutils [req-9e5b84e0-10de-4163-ac05-110f7d179297 req-d981bfee-8fe7-4092-ac06-bde366da1b38 service nova] Releasing lock "refresh_cache-ade1d760-e3e7-49c8-ba9d-b4829ca60841" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.944804] env[65758]: DEBUG oslo_concurrency.lockutils [req-1d7aa6fb-ee32-45cd-8c5f-3aa4c859a7d1 req-43aef324-91be-4884-a129-1bd67cd56bab service nova] Releasing lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.961777] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.463s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.965133] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.284s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.966314] env[65758]: INFO nova.compute.claims [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1047.969353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ba1685a3-d10a-4e9b-8294-162f321aafe1 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.967s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.183073] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "63b744d2-541a-42e3-9717-b06a4459fd50" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.212752] env[65758]: DEBUG oslo_vmware.api [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4661064, 'name': PowerOffVM_Task, 'duration_secs': 0.21557} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.213172] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.213370] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1048.213700] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f05529a4-d34d-40fb-991c-30e390a12a95 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.248924] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1048.249578] env[65758]: DEBUG nova.objects.base [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Object Instance<5fc4f1b8-9024-4155-b56d-56a8d08f0259> lazy-loaded attributes: flavor,pci_requests {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1048.249780] env[65758]: DEBUG nova.network.neutron [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1048.250141] env[65758]: WARNING neutronclient.v2_0.client [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1048.250460] env[65758]: WARNING neutronclient.v2_0.client [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
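Annotation: the PowerOffVM_Task entries above (task-4661064 created, polled at 0%, then "completed successfully") follow oslo.vmware's invoke-then-wait pattern. Below is a minimal sketch of that pattern only; the session construction and the vm_ref lookup are placeholders assumed for illustration, not values or code from this deployment.

    from oslo_vmware import api as vmware_api

    def power_off_and_wait(session, vm_ref):
        """Start PowerOffVM_Task on vm_ref and block until vCenter reports it done."""
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task server-side state (the "progress is 0% /
        # 100%" lines above) and raises if the task finishes in an error state.
        return session.wait_for_task(task)

    # Example wiring (placeholder endpoint and credentials, commented out):
    # session = vmware_api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
    #                                       api_retry_count=10, task_poll_interval=0.5)
    # task_info = power_off_and_wait(session, vm_ref)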
[ 1048.251060] env[65758]: WARNING openstack [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1048.251420] env[65758]: WARNING openstack [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1048.258510] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4c43dbc5-6c4d-4e01-8509-adc5d2375ef3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.268781] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1048.268781] env[65758]: value = "task-4661066" [ 1048.268781] env[65758]: _type = "Task" [ 1048.268781] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.283211] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661066, 'name': CloneVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.288687] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1048.288884] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1048.289084] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Deleting the datastore file [datastore1] 37bae4b3-6959-4f44-8600-26a4f859103c {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1048.289453] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a289cda8-374a-4efd-9558-47185e71e890 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.298261] env[65758]: DEBUG oslo_vmware.api [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for the task: (returnval){ [ 1048.298261] env[65758]: value = "task-4661067" [ 1048.298261] env[65758]: _type = "Task" [ 1048.298261] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.310814] env[65758]: DEBUG oslo_vmware.api [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4661067, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.355883] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bc4365c-226b-44d8-b056-b58d8298b805 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.135s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.477224] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6bfa071e-852f-4e59-82e3-c97608569178 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.449s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.478452] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.296s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.478619] env[65758]: INFO nova.compute.manager [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Unshelving [ 1048.575498] env[65758]: DEBUG nova.compute.manager [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1048.576951] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a46f95-ad5b-4524-b1a8-ba67b02a3b65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.783185] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661066, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.808601] env[65758]: DEBUG oslo_vmware.api [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Task: {'id': task-4661067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.233177} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.808868] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1048.809059] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1048.809244] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1048.809413] env[65758]: INFO nova.compute.manager [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1048.809679] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1048.809882] env[65758]: DEBUG nova.compute.manager [-] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1048.809980] env[65758]: DEBUG nova.network.neutron [-] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1048.810246] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
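Annotation: the "Waiting for function ... _deallocate_network_with_retries to return" entry above is oslo.service's looping-call machinery blocking until the wrapped function signals completion. The exact looping-call variant nova uses there is not visible in this log, so the sketch below uses FixedIntervalLoopingCall purely as a generic illustration of the pattern.

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _poll_until_done():
        # Each tick retries the work; raising LoopingCallDone ends the loop and
        # hands its retvalue back to the waiter.
        attempts['count'] += 1
        if attempts['count'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue='deallocated')

    timer = loopingcall.FixedIntervalLoopingCall(_poll_until_done)
    result = timer.start(interval=1.0).wait()   # blocks until LoopingCallDone
    print(result)                               # 'deallocated'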
[ 1048.810772] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1048.811035] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1048.818196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "e6159a35-f073-4931-b0b0-832a88680356" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.818514] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.818691] env[65758]: DEBUG nova.compute.manager [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Going to confirm migration 5 {{(pid=65758) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 1048.864976] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1049.094020] env[65758]: INFO nova.compute.manager [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] instance snapshotting [ 1049.094741] env[65758]: DEBUG nova.objects.instance [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'flavor' on Instance uuid 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.105898] env[65758]: DEBUG oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab67bd-6a89-314d-dfed-d3d5d6b8e517/disk-0.vmdk. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1049.107792] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71205763-1293-41b3-9523-b8553db3ed15 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.118821] env[65758]: DEBUG oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab67bd-6a89-314d-dfed-d3d5d6b8e517/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1049.119125] env[65758]: ERROR oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab67bd-6a89-314d-dfed-d3d5d6b8e517/disk-0.vmdk due to incomplete transfer. [ 1049.119370] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c9e224d9-592e-4e1c-969a-d46be0b87030 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.135042] env[65758]: DEBUG oslo_vmware.rw_handles [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab67bd-6a89-314d-dfed-d3d5d6b8e517/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1049.135042] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Uploaded image b02ef41d-27e9-450f-ae97-a90537c4af36 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1049.137450] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1049.142033] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-afb96c95-e71a-45eb-9bb6-466608e168ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.150446] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1049.150446] env[65758]: value = "task-4661068" [ 1049.150446] env[65758]: _type = "Task" [ 1049.150446] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.166770] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661068, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.252229] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c0e5f9-03ca-42a2-87b8-ddcc3af2d767 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.265211] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cd090a-e75a-40da-85f7-c57b98283963 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.302289] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3089b022-b315-4984-8de0-a493fb69ba82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.308872] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661066, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.318142] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f568b5-255b-4c04-ac0c-d948ebaf29a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.334045] env[65758]: DEBUG nova.compute.provider_tree [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.337356] env[65758]: WARNING neutronclient.v2_0.client [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1049.418490] env[65758]: WARNING neutronclient.v2_0.client [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
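Annotation: the recurring "Acquiring lock ... / Lock ... acquired ... waited Ns / released ... held Ns" entries (for example the "compute_resources" lock above) are emitted by oslo.concurrency's lock wrapper around a critical section. A minimal sketch of that usage follows; the lock name is copied from the log for readability, but the decorator target and semantics (default in-process, non-external lock) are assumptions for illustration.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the named lock held; the waited/held durations logged above
        # are measured around this kind of critical section.
        return 'claimed %s' % instance_uuid

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass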
[ 1049.419311] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.419799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.420394] env[65758]: DEBUG nova.network.neutron [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1049.420814] env[65758]: DEBUG nova.objects.instance [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'info_cache' on Instance uuid e6159a35-f073-4931-b0b0-832a88680356 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.523327] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.532479] env[65758]: DEBUG nova.compute.manager [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Received event network-changed-f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1049.532714] env[65758]: DEBUG nova.compute.manager [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Refreshing instance network info cache due to event network-changed-f30ab0a1-5ab3-4e16-a881-f850a8fd4399. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1049.532994] env[65758]: DEBUG oslo_concurrency.lockutils [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] Acquiring lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.533187] env[65758]: DEBUG oslo_concurrency.lockutils [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] Acquired lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.533920] env[65758]: DEBUG nova.network.neutron [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Refreshing network info cache for port f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1049.608108] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93af573f-c992-4b26-8bd9-2e6abdaa3ba5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.631059] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df738dc-e713-41de-8378-1b7729a5ed34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.661916] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661068, 'name': Destroy_Task, 'duration_secs': 0.39768} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.662267] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Destroyed the VM [ 1049.662542] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1049.662812] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d387a5f1-e278-4b3d-bd7e-db08c3eafb64 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.671267] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1049.671267] env[65758]: value = "task-4661069" [ 1049.671267] env[65758]: _type = "Task" [ 1049.671267] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.681466] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661069, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.783739] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661066, 'name': CloneVM_Task} progress is 95%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.842324] env[65758]: DEBUG nova.scheduler.client.report [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.889101] env[65758]: DEBUG nova.network.neutron [-] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1050.036424] env[65758]: WARNING neutronclient.v2_0.client [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
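Annotation: the "Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51" entry above reports the Placement inventory for this compute node. A short worked example using exactly those numbers: Placement treats (total - reserved) * allocation_ratio as schedulable capacity and max_unit as the largest single allocation, so this node advertises 192 vCPUs, 196078 MB of RAM, and 200 GB of disk.

    # Inventory values copied from the log entry above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")
    # VCPU: capacity=192, per-allocation cap=16
    # MEMORY_MB: capacity=196078, per-allocation cap=65530
    # DISK_GB: capacity=200, per-allocation cap=95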
[ 1050.038302] env[65758]: WARNING openstack [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1050.038974] env[65758]: WARNING openstack [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1050.142646] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1050.143052] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bf3339b6-147d-4424-a5ad-e51725bd6956 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.152259] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1050.152259] env[65758]: value = "task-4661070" [ 1050.152259] env[65758]: _type = "Task" [ 1050.152259] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.164429] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661070, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.183596] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661069, 'name': RemoveSnapshot_Task, 'duration_secs': 0.417188} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.183823] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1050.184231] env[65758]: DEBUG nova.compute.manager [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1050.185065] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850f28cb-b30c-40ee-8ac8-6acc2585c4ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.220965] env[65758]: WARNING neutronclient.v2_0.client [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1050.222100] env[65758]: WARNING openstack [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1050.222233] env[65758]: WARNING openstack [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1050.287502] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661066, 'name': CloneVM_Task, 'duration_secs': 1.637433} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.287777] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Created linked-clone VM from snapshot [ 1050.288682] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313244e2-8dfd-4aa6-bd0c-d5210372de0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.297690] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Uploading image 2edcb03c-85ab-4d21-8ff4-b3b47fae6985 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1050.320259] env[65758]: DEBUG nova.network.neutron [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Updated VIF entry in instance network info cache for port f30ab0a1-5ab3-4e16-a881-f850a8fd4399. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1050.320781] env[65758]: DEBUG nova.network.neutron [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Updating instance_info_cache with network_info: [{"id": "f30ab0a1-5ab3-4e16-a881-f850a8fd4399", "address": "fa:16:3e:39:c5:f9", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30ab0a1-5a", "ovs_interfaceid": "f30ab0a1-5ab3-4e16-a881-f850a8fd4399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1050.324568] env[65758]: DEBUG oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1050.324568] env[65758]: value = "vm-910029" [ 1050.324568] env[65758]: _type = "VirtualMachine" [ 1050.324568] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1050.324969] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ad89fd22-f0f6-45d2-b01f-2c9ba056b7db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.334553] env[65758]: DEBUG oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lease: (returnval){ [ 1050.334553] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521a5b56-c5e8-1724-0d14-fa223661f0f0" [ 1050.334553] env[65758]: _type = "HttpNfcLease" [ 1050.334553] env[65758]: } obtained for exporting VM: (result){ [ 1050.334553] env[65758]: value = "vm-910029" [ 1050.334553] env[65758]: _type = "VirtualMachine" [ 1050.334553] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1050.334885] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the lease: (returnval){ [ 1050.334885] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521a5b56-c5e8-1724-0d14-fa223661f0f0" [ 1050.334885] env[65758]: _type = "HttpNfcLease" [ 1050.334885] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1050.345191] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1050.345191] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521a5b56-c5e8-1724-0d14-fa223661f0f0" [ 1050.345191] env[65758]: _type = "HttpNfcLease" [ 1050.345191] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1050.349353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.349988] env[65758]: DEBUG nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1050.353125] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.830s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.353371] env[65758]: DEBUG nova.objects.instance [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lazy-loading 'pci_requests' on Instance uuid 63b744d2-541a-42e3-9717-b06a4459fd50 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.354576] env[65758]: DEBUG oslo_concurrency.lockutils [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.354799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.355134] env[65758]: DEBUG nova.objects.instance [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'flavor' on Instance uuid 5fc4f1b8-9024-4155-b56d-56a8d08f0259 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.394274] env[65758]: INFO nova.compute.manager [-] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Took 1.58 seconds to deallocate network for instance. [ 1050.429318] env[65758]: WARNING neutronclient.v2_0.client [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
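Annotation: the recurring "Disabling service 'block-storage' ... no such option valid_interfaces in group [cinder]" and matching '[barbican]' warnings throughout this log come from oslo.config raising NoSuchOptError when a group is read for an option that was never registered. The minimal reproduction below mirrors the group and option names from the warnings; the registration state is assumed for illustration and says nothing about why this deployment's options are unregistered.

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))   # group exists, option does not

    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print(exc)   # no such option valid_interfaces in group [cinder]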
[ 1050.429986] env[65758]: WARNING openstack [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1050.430588] env[65758]: WARNING openstack [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1050.605898] env[65758]: WARNING neutronclient.v2_0.client [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1050.606596] env[65758]: WARNING openstack [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1050.607039] env[65758]: WARNING openstack [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1050.664786] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661070, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.693391] env[65758]: DEBUG nova.network.neutron [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [{"id": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "address": "fa:16:3e:a5:af:26", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b8d361-f2", "ovs_interfaceid": "b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1050.700283] env[65758]: INFO nova.compute.manager [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Shelve offloading [ 1050.828524] env[65758]: DEBUG oslo_concurrency.lockutils [req-1ed9f803-178f-4519-8265-c3234a2b42d2 req-f1c51b06-cd70-422a-99cf-2f647a0d3576 service nova] Releasing lock "refresh_cache-31b7d1ee-58c1-47f3-a862-0bc5cb17addc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.844846] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1050.844846] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521a5b56-c5e8-1724-0d14-fa223661f0f0" [ 1050.844846] env[65758]: _type = "HttpNfcLease" [ 1050.844846] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1050.845451] env[65758]: DEBUG oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1050.845451] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521a5b56-c5e8-1724-0d14-fa223661f0f0" [ 1050.845451] env[65758]: _type = "HttpNfcLease" [ 1050.845451] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1050.846271] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecec02f7-825b-4939-a171-ea58bf5506be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.856010] env[65758]: DEBUG oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c64b76-4ecb-0fd5-113a-c0769526f9ec/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1050.856261] env[65758]: DEBUG oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c64b76-4ecb-0fd5-113a-c0769526f9ec/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1050.858787] env[65758]: DEBUG nova.compute.utils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1050.861420] env[65758]: DEBUG nova.objects.instance [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lazy-loading 'numa_topology' on Instance uuid 63b744d2-541a-42e3-9717-b06a4459fd50 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.863452] env[65758]: DEBUG nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1050.863653] env[65758]: DEBUG nova.network.neutron [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1050.864024] env[65758]: WARNING neutronclient.v2_0.client [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1050.864424] env[65758]: WARNING neutronclient.v2_0.client [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
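Editor's note: the export sequence above (ExportVm returns an HttpNfcLease, the lease is polled from "initializing" to "ready", the lease info is read, and the VMDK URL is extracted before being opened for reading) is handled by oslo.vmware's api.py and rw_handles.py. The sketch below only mirrors that control flow with simplified stand-in objects; the attribute shapes, the FakeLease class, and the example URL are assumptions for illustration, not oslo.vmware's real object model.

    import time

    class FakeLease:
        """Stand-in for an HttpNfcLease; the real one comes from vSphere."""
        def __init__(self):
            self._polls = 0

        @property
        def state(self):
            # Pretend the lease needs a couple of polls before it is ready,
            # like the "is initializing" then "is ready" lines above.
            self._polls += 1
            return 'ready' if self._polls > 2 else 'initializing'

        # Simplified lease info: the real info carries deviceUrl entries.
        info = {'deviceUrl': [{'disk': True,
                               'url': 'https://esx.example/nfc/xxx/disk-0.vmdk'}]}

    def wait_for_lease_ready(lease, interval=0.5):
        # Poll until the lease reports ready, as wait_for_lease_ready does.
        while lease.state != 'ready':
            time.sleep(interval)

    def find_vmdk_url(lease_info):
        # Pick the first disk device URL, as the "Found VMDK URL ... from
        # lease info" step does.
        for dev in lease_info['deviceUrl']:
            if dev['disk']:
                return dev['url']

    lease = FakeLease()
    wait_for_lease_ready(lease)
    print(find_vmdk_url(lease.info))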
[ 1050.865030] env[65758]: WARNING openstack [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1050.865402] env[65758]: WARNING openstack [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1050.873037] env[65758]: WARNING neutronclient.v2_0.client [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1050.873619] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1050.873990] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1050.935132] env[65758]: INFO nova.compute.claims [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1050.939385] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.984055] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2f8c4295-0a62-48bc-9c6c-54a744518b74 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.997333] env[65758]: DEBUG nova.policy [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'05f7aeab6e534bf9bd566dbfa0da1418', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02d1056adfc646858ba42771ad01c221', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1051.042220] env[65758]: DEBUG nova.objects.instance [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'pci_requests' on Instance uuid 5fc4f1b8-9024-4155-b56d-56a8d08f0259 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.166050] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661070, 'name': CreateSnapshot_Task, 'duration_secs': 0.549676} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.166349] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1051.167109] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386a90d8-2849-4d8f-82f3-16d5b74be164 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.196748] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-e6159a35-f073-4931-b0b0-832a88680356" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.197018] env[65758]: DEBUG nova.objects.instance [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'migration_context' on Instance uuid e6159a35-f073-4931-b0b0-832a88680356 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.205031] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.205199] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77954878-7b83-47f9-b90c-083c94568493 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.213940] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1051.213940] env[65758]: value = "task-4661073" [ 1051.213940] env[65758]: _type = 
"Task" [ 1051.213940] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.229601] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1051.229892] env[65758]: DEBUG nova.compute.manager [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1051.230748] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2615d8-6442-426f-a9e4-6bd15373dd2a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.237853] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.238024] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.238200] env[65758]: DEBUG nova.network.neutron [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1051.326127] env[65758]: DEBUG nova.network.neutron [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Successfully created port: 8fa0a611-f7a8-44d4-8921-988332d441bc {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1051.364798] env[65758]: DEBUG nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1051.546584] env[65758]: DEBUG nova.objects.base [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Object Instance<5fc4f1b8-9024-4155-b56d-56a8d08f0259> lazy-loaded attributes: flavor,pci_requests {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1051.546584] env[65758]: DEBUG nova.network.neutron [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1051.546584] env[65758]: WARNING neutronclient.v2_0.client [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1051.546584] env[65758]: WARNING neutronclient.v2_0.client [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1051.546584] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1051.546584] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1051.609493] env[65758]: DEBUG nova.policy [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1051.686608] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1051.687066] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ebb48c8e-4fec-4216-a915-8bdd91453318 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.697657] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1051.697657] env[65758]: value = "task-4661074" [ 1051.697657] env[65758]: _type = "Task" [ 1051.697657] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.701798] env[65758]: DEBUG nova.objects.base [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1051.703335] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c03bdaa-8d05-4f2b-aedc-55751cd8c49d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.728201] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661074, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.728530] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc9a5515-14e6-464f-8d34-ffcde9b2a341 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.737585] env[65758]: DEBUG oslo_vmware.api [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1051.737585] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529f4db9-b9c3-86c8-ce8e-4fae02bc8619" [ 1051.737585] env[65758]: _type = "Task" [ 1051.737585] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.741484] env[65758]: WARNING neutronclient.v2_0.client [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
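Editor's note: the recurring "Disabling service 'block-storage' / 'key-manager'" warnings come from openstacksdk failing to read a `valid_interfaces` option from the [cinder] and [barbican] groups of the service config, which surfaces as oslo_config.cfg.NoSuchOptError. The snippet below is a throwaway reproduction of that error class with a fresh ConfigOpts instance, not Nova's or the SDK's real option registration:

    from oslo_config import cfg

    # Minimal repro (assumption: standalone ConfigOpts, not nova.conf):
    # reading an option that was never registered for a group raises
    # NoSuchOptError, which is what the warnings above wrap and report.
    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))
    conf(args=[])

    try:
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        # Prints roughly: no such option valid_interfaces in group [cinder]
        print(exc)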
[ 1051.742368] env[65758]: WARNING openstack [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1051.742800] env[65758]: WARNING openstack [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1051.757429] env[65758]: DEBUG oslo_vmware.api [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529f4db9-b9c3-86c8-ce8e-4fae02bc8619, 'name': SearchDatastore_Task, 'duration_secs': 0.009021} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.757742] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.938385] env[65758]: DEBUG nova.network.neutron [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Successfully created port: 608946a3-79b3-484c-b023-da1a84676162 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1052.204570] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd866c6-f4c2-464d-8f1d-77936690d693 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.217442] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdbd164-0bea-43f0-9427-8687023b2bcb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.221358] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661074, 'name': CloneVM_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.252511] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52615f5-876f-486b-9685-0a40f85768ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.264511] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70280e10-3394-4153-93e9-a20a50d116fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.285117] env[65758]: DEBUG nova.compute.provider_tree [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.374716] env[65758]: DEBUG nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1052.410076] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1052.410516] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1052.410751] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1052.411039] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1052.411359] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 
tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1052.411579] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1052.411847] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1052.412091] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1052.412435] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1052.412678] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1052.412858] env[65758]: DEBUG nova.virt.hardware [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1052.413922] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59fe7a3-183b-4a79-a91d-6a0ebaedb76d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.424430] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37e6c57-ae4f-4a45-aa60-6f9c38d3ec39 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.708765] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661074, 'name': CloneVM_Task} progress is 95%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.789163] env[65758]: DEBUG nova.scheduler.client.report [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.888913] env[65758]: DEBUG nova.network.neutron [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Successfully updated port: 8fa0a611-f7a8-44d4-8921-988332d441bc {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1052.970426] env[65758]: DEBUG nova.compute.manager [req-e431aa64-0402-473f-a511-fb63fc810fa9 req-070b0b7f-c100-4730-91c7-fcb8cfa9a531 service nova] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Received event network-vif-deleted-e953f008-edba-4efb-8764-649f24572836 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1053.161396] env[65758]: WARNING neutronclient.v2_0.client [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1053.162350] env[65758]: WARNING openstack [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1053.162791] env[65758]: WARNING openstack [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.211621] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661074, 'name': CloneVM_Task, 'duration_secs': 1.42205} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.211917] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Created linked-clone VM from snapshot [ 1053.212788] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5cf4dc-eee4-493e-a65e-b73e2b7b8a03 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.221605] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Uploading image 109273e3-b0b7-4090-8a28-89c1405c1b9c {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1053.246763] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1053.246763] env[65758]: value = "vm-910032" [ 1053.246763] env[65758]: _type = "VirtualMachine" [ 1053.246763] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1053.247138] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-02bf3169-c546-46c8-8759-1edfc40f2c33 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.255494] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease: (returnval){ [ 1053.255494] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5276f959-fbb8-fcde-8206-d58720e831c2" [ 1053.255494] env[65758]: _type = "HttpNfcLease" [ 1053.255494] env[65758]: } obtained for exporting VM: (result){ [ 1053.255494] env[65758]: value = "vm-910032" [ 1053.255494] env[65758]: _type = "VirtualMachine" [ 1053.255494] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1053.256128] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the lease: (returnval){ [ 1053.256128] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5276f959-fbb8-fcde-8206-d58720e831c2" [ 1053.256128] env[65758]: _type = "HttpNfcLease" [ 1053.256128] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1053.268854] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1053.268854] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5276f959-fbb8-fcde-8206-d58720e831c2" [ 1053.268854] env[65758]: _type = "HttpNfcLease" [ 1053.268854] env[65758]: } is initializing. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1053.295178] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.942s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.295783] env[65758]: WARNING neutronclient.v2_0.client [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1053.299021] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.360s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.299369] env[65758]: DEBUG nova.objects.instance [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lazy-loading 'resources' on Instance uuid 37bae4b3-6959-4f44-8600-26a4f859103c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.323142] env[65758]: DEBUG nova.network.neutron [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Updating instance_info_cache with network_info: [{"id": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "address": "fa:16:3e:c6:9a:3f", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4288a41e-2e", "ovs_interfaceid": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1053.351479] env[65758]: INFO nova.network.neutron [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating port 
83c394c9-9b0d-40ad-923c-00e70d63c85a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1053.392454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquiring lock "refresh_cache-a014debf-2f16-4b30-af78-27a6751060de" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.392836] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquired lock "refresh_cache-a014debf-2f16-4b30-af78-27a6751060de" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.392949] env[65758]: DEBUG nova.network.neutron [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1053.752662] env[65758]: DEBUG nova.network.neutron [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Successfully updated port: 608946a3-79b3-484c-b023-da1a84676162 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1053.770933] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1053.770933] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5276f959-fbb8-fcde-8206-d58720e831c2" [ 1053.770933] env[65758]: _type = "HttpNfcLease" [ 1053.770933] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1053.771284] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1053.771284] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5276f959-fbb8-fcde-8206-d58720e831c2" [ 1053.771284] env[65758]: _type = "HttpNfcLease" [ 1053.771284] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1053.772321] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c442b6f8-835d-4ebe-8a93-4f1db8d45a55 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.783122] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52960262-0d04-f3e1-6f30-aa38bc124116/disk-0.vmdk from lease info. 
{{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1053.783594] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52960262-0d04-f3e1-6f30-aa38bc124116/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1053.853208] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.854174] env[65758]: WARNING neutronclient.v2_0.client [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1053.854533] env[65758]: WARNING openstack [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1053.855293] env[65758]: WARNING openstack [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.862188] env[65758]: WARNING neutronclient.v2_0.client [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
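Editor's note: the "Inventory has not changed for provider ..." lines report the Placement inventory for this compute node (VCPU, MEMORY_MB, DISK_GB with total, reserved and allocation_ratio). The effective schedulable capacity Placement derives from such a record is (total - reserved) * allocation_ratio; the helper below just applies that formula to the values logged here and is not Nova code.

    # Illustrative only: effective capacity per resource class from the
    # inventory record seen in the log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # capacity = (total - reserved) * allocation_ratio
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}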
[ 1053.896750] env[65758]: WARNING openstack [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1053.897205] env[65758]: WARNING openstack [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1053.939567] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-71eddb2e-22a0-4f19-bf0d-1bc403dcb897 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.972508] env[65758]: DEBUG nova.network.neutron [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1054.146835] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9160a2b-a21f-44b4-b17c-a915d3135126 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.156736] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fc1b49-906e-4eec-be71-1b109563dbdd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.190012] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f115231b-1872-41f7-99c9-54a8f0b8679b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.199310] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88da76dd-819f-4b15-8598-551dc8dce105 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.216417] env[65758]: DEBUG nova.compute.provider_tree [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.260192] env[65758]: DEBUG oslo_concurrency.lockutils [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.260543] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.260882] env[65758]: DEBUG nova.network.neutron [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1054.331017] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1054.332269] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbcd00c-a505-4696-92a9-aa1f0f8ee889 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.342478] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1054.342718] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c578974-da31-49d0-819e-495b9939ef3a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.427028] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1054.427028] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1054.427028] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleting the datastore file [datastore2] 89167b37-4c21-4678-a0f0-5a4ce932c4d8 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.427028] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a41f316b-f124-48bd-8ad7-868db64b9185 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.436680] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 
1054.436680] env[65758]: value = "task-4661078" [ 1054.436680] env[65758]: _type = "Task" [ 1054.436680] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.447610] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661078, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.454085] env[65758]: WARNING neutronclient.v2_0.client [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1054.455486] env[65758]: WARNING openstack [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1054.456153] env[65758]: WARNING openstack [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1054.516298] env[65758]: DEBUG nova.compute.manager [req-4827f970-d839-4fe9-9a95-535dca939489 req-c49d9792-fe1e-4f50-9008-898fc0464592 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] Received event network-vif-plugged-8fa0a611-f7a8-44d4-8921-988332d441bc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1054.516949] env[65758]: DEBUG oslo_concurrency.lockutils [req-4827f970-d839-4fe9-9a95-535dca939489 req-c49d9792-fe1e-4f50-9008-898fc0464592 service nova] Acquiring lock "a014debf-2f16-4b30-af78-27a6751060de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.517620] env[65758]: DEBUG oslo_concurrency.lockutils [req-4827f970-d839-4fe9-9a95-535dca939489 req-c49d9792-fe1e-4f50-9008-898fc0464592 service nova] Lock "a014debf-2f16-4b30-af78-27a6751060de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.518142] env[65758]: DEBUG oslo_concurrency.lockutils [req-4827f970-d839-4fe9-9a95-535dca939489 req-c49d9792-fe1e-4f50-9008-898fc0464592 service nova] Lock "a014debf-2f16-4b30-af78-27a6751060de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.518430] env[65758]: DEBUG nova.compute.manager 
[req-4827f970-d839-4fe9-9a95-535dca939489 req-c49d9792-fe1e-4f50-9008-898fc0464592 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] No waiting events found dispatching network-vif-plugged-8fa0a611-f7a8-44d4-8921-988332d441bc {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1054.518771] env[65758]: WARNING nova.compute.manager [req-4827f970-d839-4fe9-9a95-535dca939489 req-c49d9792-fe1e-4f50-9008-898fc0464592 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] Received unexpected event network-vif-plugged-8fa0a611-f7a8-44d4-8921-988332d441bc for instance with vm_state building and task_state spawning. [ 1054.570946] env[65758]: DEBUG nova.network.neutron [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Updating instance_info_cache with network_info: [{"id": "8fa0a611-f7a8-44d4-8921-988332d441bc", "address": "fa:16:3e:7c:8c:aa", "network": {"id": "de517dde-5ce2-4834-a3e3-a7e172ed87cc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1593710302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d1056adfc646858ba42771ad01c221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fa0a611-f7", "ovs_interfaceid": "8fa0a611-f7a8-44d4-8921-988332d441bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1054.720761] env[65758]: DEBUG nova.scheduler.client.report [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.765440] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1054.765872] env[65758]: WARNING openstack [None 
req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1054.811613] env[65758]: WARNING nova.network.neutron [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] 2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4 already exists in list: networks containing: ['2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4']. ignoring it [ 1054.950101] env[65758]: DEBUG oslo_vmware.api [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3045} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.950664] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.950962] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1054.951254] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1054.982171] env[65758]: INFO nova.scheduler.client.report [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted allocations for instance 89167b37-4c21-4678-a0f0-5a4ce932c4d8 [ 1055.078029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Releasing lock "refresh_cache-a014debf-2f16-4b30-af78-27a6751060de" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.078029] env[65758]: DEBUG nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Instance network_info: |[{"id": "8fa0a611-f7a8-44d4-8921-988332d441bc", "address": "fa:16:3e:7c:8c:aa", "network": {"id": "de517dde-5ce2-4834-a3e3-a7e172ed87cc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1593710302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d1056adfc646858ba42771ad01c221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fa0a611-f7", "ovs_interfaceid": "8fa0a611-f7a8-44d4-8921-988332d441bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1055.078029] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:8c:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fa0a611-f7a8-44d4-8921-988332d441bc', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1055.085500] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Creating folder: Project (02d1056adfc646858ba42771ad01c221). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1055.087078] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1e455ee-5703-46f8-9425-3aa12e8b9d2f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.103626] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Created folder: Project (02d1056adfc646858ba42771ad01c221) in parent group-v909763. [ 1055.104827] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Creating folder: Instances. Parent ref: group-v910033. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1055.105107] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba906910-bfc2-4184-aff0-3c20b1ce5d54 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.119026] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Created folder: Instances in parent group-v910033. 
[ 1055.119026] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1055.119423] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a014debf-2f16-4b30-af78-27a6751060de] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1055.120676] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63fd861c-70a7-4cdc-ab00-090139477e25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.154824] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1055.154824] env[65758]: value = "task-4661081" [ 1055.154824] env[65758]: _type = "Task" [ 1055.154824] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.173749] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661081, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.176631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.176631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.176631] env[65758]: DEBUG nova.network.neutron [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1055.227506] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.928s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.230518] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.473s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.256190] env[65758]: INFO nova.scheduler.client.report [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a 
tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Deleted allocations for instance 37bae4b3-6959-4f44-8600-26a4f859103c [ 1055.486883] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.493819] env[65758]: WARNING neutronclient.v2_0.client [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1055.494778] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1055.495268] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1055.617329] env[65758]: DEBUG nova.compute.manager [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-vif-plugged-608946a3-79b3-484c-b023-da1a84676162 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1055.617751] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.618649] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.618858] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.619044] env[65758]: DEBUG nova.compute.manager 
[req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] No waiting events found dispatching network-vif-plugged-608946a3-79b3-484c-b023-da1a84676162 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1055.619279] env[65758]: WARNING nova.compute.manager [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received unexpected event network-vif-plugged-608946a3-79b3-484c-b023-da1a84676162 for instance with vm_state active and task_state None. [ 1055.619516] env[65758]: DEBUG nova.compute.manager [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-changed-608946a3-79b3-484c-b023-da1a84676162 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1055.619631] env[65758]: DEBUG nova.compute.manager [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Refreshing instance network info cache due to event network-changed-608946a3-79b3-484c-b023-da1a84676162. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1055.619792] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Acquiring lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.665950] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661081, 'name': CreateVM_Task, 'duration_secs': 0.471894} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.666252] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a014debf-2f16-4b30-af78-27a6751060de] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1055.666978] env[65758]: WARNING neutronclient.v2_0.client [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1055.667528] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.667914] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.668391] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1055.668641] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6704c250-95a1-4d19-8422-4ffb2adf62ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.672136] env[65758]: WARNING neutronclient.v2_0.client [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1055.673137] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1055.673584] env[65758]: WARNING openstack [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1055.682025] env[65758]: WARNING neutronclient.v2_0.client [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1055.682793] env[65758]: WARNING openstack [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1055.683523] env[65758]: WARNING openstack [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1055.696135] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1055.696135] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5289a2f7-f9b5-5b28-7656-2a494ee4d265" [ 1055.696135] env[65758]: _type = "Task" [ 1055.696135] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.707047] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5289a2f7-f9b5-5b28-7656-2a494ee4d265, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.768993] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0bb8b0f0-d7c5-4cbd-b01a-55a4407b424a tempest-ServersV294TestFqdnHostnames-1844820586 tempest-ServersV294TestFqdnHostnames-1844820586-project-member] Lock "37bae4b3-6959-4f44-8600-26a4f859103c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.608s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.794115] env[65758]: DEBUG nova.network.neutron [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "608946a3-79b3-484c-b023-da1a84676162", "address": "fa:16:3e:c8:f2:1d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap608946a3-79", "ovs_interfaceid": "608946a3-79b3-484c-b023-da1a84676162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1055.896016] env[65758]: WARNING neutronclient.v2_0.client [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced 
tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1055.897048] env[65758]: WARNING openstack [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1055.897332] env[65758]: WARNING openstack [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1055.980148] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b169cfa9-0645-4719-8777-96c062c4852b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.990631] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b8e692-a5e4-47ba-b0de-ae4bf240883a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.028138] env[65758]: DEBUG nova.network.neutron [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [{"id": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "address": "fa:16:3e:01:98:57", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c394c9-9b", "ovs_interfaceid": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1056.031204] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6047568-d27e-41bb-8900-7793dd23a71a {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.040851] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f6ac37-3d38-4d74-a4cf-3b75f9937e47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.057314] env[65758]: DEBUG nova.compute.provider_tree [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.209828] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5289a2f7-f9b5-5b28-7656-2a494ee4d265, 'name': SearchDatastore_Task, 'duration_secs': 0.014414} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.210359] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.210547] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1056.210895] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.211097] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.211424] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1056.211772] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85032e89-d707-4488-b1e6-056be640295f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.222278] env[65758]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1056.222975] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1056.223769] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db7badde-4bb6-49be-b4fd-3c1ce0f802a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.230661] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1056.230661] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525ce5c3-ef74-9013-70c3-c2e8cf43372e" [ 1056.230661] env[65758]: _type = "Task" [ 1056.230661] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.239966] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525ce5c3-ef74-9013-70c3-c2e8cf43372e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.297451] env[65758]: DEBUG oslo_concurrency.lockutils [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.298454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.298713] env[65758]: DEBUG oslo_concurrency.lockutils [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.299058] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Acquired lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.299500] env[65758]: DEBUG nova.network.neutron [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Refreshing network info cache for port 608946a3-79b3-484c-b023-da1a84676162 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1056.301561] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dce0743-bb59-4255-8daf-b210b697795b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.322015] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1056.322015] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1056.322219] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 
tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1056.322271] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1056.322399] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1056.322542] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1056.322755] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1056.322910] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1056.323089] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1056.323258] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1056.323426] env[65758]: DEBUG nova.virt.hardware [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1056.330397] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Reconfiguring VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1056.331655] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85165154-c446-4cb8-85d1-e6a1cc1b40e7 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.352017] env[65758]: DEBUG oslo_vmware.api [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1056.352017] env[65758]: value = "task-4661083" [ 1056.352017] env[65758]: _type = "Task" [ 1056.352017] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.362823] env[65758]: DEBUG oslo_vmware.api [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661083, 'name': ReconfigVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.532358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.560972] env[65758]: DEBUG nova.scheduler.client.report [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.743444] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525ce5c3-ef74-9013-70c3-c2e8cf43372e, 'name': SearchDatastore_Task, 'duration_secs': 0.013204} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.744572] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc79b802-4cf9-4b37-a09f-2a789fa4edf4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.752332] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1056.752332] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522b023c-296d-3199-04d2-0d4d46b69ba8" [ 1056.752332] env[65758]: _type = "Task" [ 1056.752332] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.764299] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522b023c-296d-3199-04d2-0d4d46b69ba8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.806440] env[65758]: WARNING neutronclient.v2_0.client [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1056.807140] env[65758]: WARNING openstack [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1056.807508] env[65758]: WARNING openstack [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1056.864963] env[65758]: DEBUG oslo_vmware.api [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661083, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.973141] env[65758]: WARNING neutronclient.v2_0.client [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1056.974057] env[65758]: WARNING openstack [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1056.974377] env[65758]: WARNING openstack [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1057.072454] env[65758]: DEBUG nova.network.neutron [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updated VIF entry in instance network info cache for port 608946a3-79b3-484c-b023-da1a84676162. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1057.072885] env[65758]: DEBUG nova.network.neutron [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "608946a3-79b3-484c-b023-da1a84676162", "address": "fa:16:3e:c8:f2:1d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap608946a3-79", "ovs_interfaceid": "608946a3-79b3-484c-b023-da1a84676162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1057.264505] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522b023c-296d-3199-04d2-0d4d46b69ba8, 'name': SearchDatastore_Task, 'duration_secs': 0.01788} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.264829] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.265221] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a014debf-2f16-4b30-af78-27a6751060de/a014debf-2f16-4b30-af78-27a6751060de.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1057.265572] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81a15634-f488-406c-ae93-5ce9d9835ac4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.274346] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1057.274346] env[65758]: value = "task-4661084" [ 1057.274346] env[65758]: _type = "Task" [ 1057.274346] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.282992] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661084, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.366421] env[65758]: DEBUG oslo_vmware.api [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661083, 'name': ReconfigVM_Task, 'duration_secs': 0.814382} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.366887] env[65758]: WARNING neutronclient.v2_0.client [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
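The CopyVirtualDisk_Task entries above show oslo.vmware's usual invoke-then-poll pattern: the driver invokes a vSphere *_Task method through the API session, then blocks in wait_for_task() while _poll_task logs the progress percentages seen in the log. A minimal sketch of that pattern follows; the connection parameters and datastore paths are placeholders rather than values from this deployment, and the exact CopyVirtualDisk_Task arguments are an assumption.

# Sketch of the invoke-then-poll pattern visible in the surrounding log.
from oslo_vmware import api as vmware_api

# Hypothetical session; host and credentials are placeholders.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=3, task_poll_interval=0.5, port=443)

vdm = session.vim.service_content.virtualDiskManager
# Invoking VirtualDiskManager.CopyVirtualDisk_Task, as logged above;
# the source/destination names are hypothetical datastore paths.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', vdm,
    sourceName='[datastore1] cache/base.vmdk',
    destName='[datastore1] instance/instance.vmdk')

# Blocks while polling; each poll produces a line like
# "Task: {'id': task-..., 'name': CopyVirtualDisk_Task} progress is 51%."
session.wait_for_task(task)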
[ 1057.367151] env[65758]: DEBUG oslo_concurrency.lockutils [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.367358] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Reconfigured VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1057.575973] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.345s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.579241] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Releasing lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.579543] env[65758]: DEBUG nova.compute.manager [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Received event network-vif-unplugged-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1057.579693] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Acquiring lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.579895] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.580071] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.580243] env[65758]: DEBUG nova.compute.manager [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] No waiting events found dispatching network-vif-unplugged-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1057.580448] env[65758]: WARNING nova.compute.manager 
[req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Received unexpected event network-vif-unplugged-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 for instance with vm_state shelved_offloaded and task_state None. [ 1057.580589] env[65758]: DEBUG nova.compute.manager [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Received event network-changed-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1057.580738] env[65758]: DEBUG nova.compute.manager [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Refreshing instance network info cache due to event network-changed-4288a41e-2eac-4d34-9eb1-c0e6a398cbe1. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1057.580935] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Acquiring lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.581094] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Acquired lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.581273] env[65758]: DEBUG nova.network.neutron [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Refreshing network info cache for port 4288a41e-2eac-4d34-9eb1-c0e6a398cbe1 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1057.582788] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.096s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.582939] env[65758]: DEBUG nova.objects.instance [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'resources' on Instance uuid 89167b37-4c21-4678-a0f0-5a4ce932c4d8 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.787791] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661084, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.872519] env[65758]: DEBUG oslo_concurrency.lockutils [None req-56ab8dbb-1cd2-4781-9fe3-87c90ecab0a8 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.517s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.086284] env[65758]: WARNING neutronclient.v2_0.client [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1058.087430] env[65758]: WARNING openstack [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1058.087807] env[65758]: WARNING openstack [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1058.095741] env[65758]: DEBUG nova.objects.instance [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'numa_topology' on Instance uuid 89167b37-4c21-4678-a0f0-5a4ce932c4d8 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.156930] env[65758]: INFO nova.scheduler.client.report [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted allocation for migration c9995e3c-d411-493e-9f28-8c93e4bf77ec [ 1058.273444] env[65758]: WARNING neutronclient.v2_0.client [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
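The repeated "Disabling service 'block-storage'" and "Disabling service 'key-manager'" warnings above come from openstacksdk failing to read the keystoneauth adapter option valid_interfaces from the [cinder] and [barbican] config groups; the read raises oslo_config's NoSuchOptError because the option is not registered in the inspected configuration. Whether that is expected for this test run or a configuration gap is not visible in the log; the snippet below is only an illustrative sketch, assuming keystoneauth1's standard registration helper, of how such adapter options are normally made resolvable.

# Hypothetical illustration: register keystoneauth adapter options
# (service-type, region-name, valid-interfaces, ...) for a config group
# so that CONF.<group>.valid_interfaces does not raise NoSuchOptError.
from keystoneauth1 import loading as ks_loading
from oslo_config import cfg

CONF = cfg.CONF

for group in ('cinder', 'barbican'):  # group names taken from the warnings above
    ks_loading.register_adapter_conf_options(CONF, group)

# After registration the option resolves instead of raising NoSuchOptError.
print(CONF.cinder.valid_interfaces)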
[ 1058.274220] env[65758]: WARNING openstack [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1058.274612] env[65758]: WARNING openstack [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1058.293993] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661084, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631751} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.295062] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a014debf-2f16-4b30-af78-27a6751060de/a014debf-2f16-4b30-af78-27a6751060de.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1058.295062] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1058.295062] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-131c9fb5-cfe0-4252-8678-c15b23033b4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.302904] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1058.302904] env[65758]: value = "task-4661086" [ 1058.302904] env[65758]: _type = "Task" [ 1058.302904] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.315486] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661086, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.375013] env[65758]: DEBUG nova.network.neutron [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Updated VIF entry in instance network info cache for port 4288a41e-2eac-4d34-9eb1-c0e6a398cbe1. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1058.375404] env[65758]: DEBUG nova.network.neutron [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Updating instance_info_cache with network_info: [{"id": "4288a41e-2eac-4d34-9eb1-c0e6a398cbe1", "address": "fa:16:3e:c6:9a:3f", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": null, "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4288a41e-2e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1058.600737] env[65758]: DEBUG nova.objects.base [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Object Instance<89167b37-4c21-4678-a0f0-5a4ce932c4d8> lazy-loaded attributes: resources,numa_topology {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1058.662967] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ef5465ca-ea25-4e67-8f01-9e198a4a68e3 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.844s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.792124] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ac707b-b3bc-4588-b81f-ebd8a5560ca6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.801206] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76b572f-f0da-4e7e-bf60-689f5bdfd706 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.813615] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102197} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.839434] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1058.840531] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03c26ab-88b5-4b7a-8215-c762246883a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.843777] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06d6129-e1fa-4170-b34c-243ff3d267cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.866031] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4044bd8b-4307-476c-aa03-e2b71349f736 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.882091] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] a014debf-2f16-4b30-af78-27a6751060de/a014debf-2f16-4b30-af78-27a6751060de.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1058.882819] env[65758]: DEBUG oslo_concurrency.lockutils [req-dfb5632c-7073-4d6c-a479-ef830b7bf0ee req-e6dac653-e1a5-441b-9601-3894279928b8 service nova] Releasing lock "refresh_cache-89167b37-4c21-4678-a0f0-5a4ce932c4d8" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.883277] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5df9a980-3f93-4240-a1fb-d70ef6ed730c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.914208] env[65758]: DEBUG nova.compute.provider_tree [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.917301] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1058.917301] env[65758]: value = "task-4661087" [ 1058.917301] env[65758]: _type = "Task" [ 1058.917301] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.926986] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661087, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.111230] env[65758]: DEBUG nova.compute.manager [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] Received event network-changed-8fa0a611-f7a8-44d4-8921-988332d441bc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1059.111506] env[65758]: DEBUG nova.compute.manager [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] Refreshing instance network info cache due to event network-changed-8fa0a611-f7a8-44d4-8921-988332d441bc. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1059.112135] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Acquiring lock "refresh_cache-a014debf-2f16-4b30-af78-27a6751060de" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.112135] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Acquired lock "refresh_cache-a014debf-2f16-4b30-af78-27a6751060de" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.112135] env[65758]: DEBUG nova.network.neutron [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] Refreshing network info cache for port 8fa0a611-f7a8-44d4-8921-988332d441bc {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1059.396119] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b2484106b4f6f423c8ec89c0d95d9ab4',container_format='bare',created_at=2025-11-21T13:21:15Z,direct_url=,disk_format='vmdk',id=df203c52-cb8e-4277-903c-c114ae8627be,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1673597483-shelved',owner='8be788d761114dfca7244f953b571c7d',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-11-21T13:21:33Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1059.396855] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1059.396855] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:360}} [ 1059.396855] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1059.397015] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1059.397172] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1059.397344] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1059.397547] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1059.397701] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1059.397906] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1059.398568] env[65758]: DEBUG nova.virt.hardware [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1059.402522] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6356bbe2-cd87-412f-8dc5-18132b472cea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.414069] env[65758]: DEBUG oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c64b76-4ecb-0fd5-113a-c0769526f9ec/disk-0.vmdk. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1059.415761] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d7689f-7ff7-4411-a8ac-19f39c113a20 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.421818] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2586c17b-986f-4549-a588-00246fa84e09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.426461] env[65758]: DEBUG nova.scheduler.client.report [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.320531] env[65758]: WARNING neutronclient.v2_0.client [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1060.324156] env[65758]: WARNING openstack [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1060.324156] env[65758]: WARNING openstack [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1060.332907] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.750s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.337026] env[65758]: DEBUG oslo_concurrency.lockutils [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.337891] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced 
tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:98:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f87a752-ebb0-49a4-a67b-e356fa45b89b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83c394c9-9b0d-40ad-923c-00e70d63c85a', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1060.347328] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1060.347712] env[65758]: DEBUG oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c64b76-4ecb-0fd5-113a-c0769526f9ec/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1060.347869] env[65758]: ERROR oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c64b76-4ecb-0fd5-113a-c0769526f9ec/disk-0.vmdk due to incomplete transfer. [ 1060.354959] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1060.354959] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7bfc7dc6-bd0f-43c0-8995-1523a00c73ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.358979] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661087, 'name': ReconfigVM_Task, 'duration_secs': 0.429289} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.362931] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eed40e0a-de0f-4897-93d7-401903a8d7d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.375462] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Reconfigured VM instance instance-00000061 to attach disk [datastore1] a014debf-2f16-4b30-af78-27a6751060de/a014debf-2f16-4b30-af78-27a6751060de.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1060.377073] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93255931-0795-4efa-9e51-b68c658df4ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.386510] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1060.386510] env[65758]: value = "task-4661090" [ 1060.386510] env[65758]: _type = "Task" [ 1060.386510] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.387101] env[65758]: DEBUG oslo_vmware.rw_handles [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c64b76-4ecb-0fd5-113a-c0769526f9ec/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1060.387101] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Uploaded image 2edcb03c-85ab-4d21-8ff4-b3b47fae6985 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1060.390038] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1060.392600] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-03892861-9264-4bf2-ada5-2d9982b7699e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.395029] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1060.395029] env[65758]: value = "task-4661089" [ 1060.395029] env[65758]: _type = "Task" [ 1060.395029] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.408285] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1060.408285] env[65758]: value = "task-4661091" [ 1060.408285] env[65758]: _type = "Task" [ 1060.408285] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.412733] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661090, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.420771] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661089, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.427793] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661091, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.634452] env[65758]: WARNING neutronclient.v2_0.client [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1060.635036] env[65758]: WARNING openstack [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1060.635587] env[65758]: WARNING openstack [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1060.813490] env[65758]: DEBUG nova.network.neutron [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] Updated VIF entry in instance network info cache for port 8fa0a611-f7a8-44d4-8921-988332d441bc. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1060.814020] env[65758]: DEBUG nova.network.neutron [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] Updating instance_info_cache with network_info: [{"id": "8fa0a611-f7a8-44d4-8921-988332d441bc", "address": "fa:16:3e:7c:8c:aa", "network": {"id": "de517dde-5ce2-4834-a3e3-a7e172ed87cc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1593710302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d1056adfc646858ba42771ad01c221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fa0a611-f7", "ovs_interfaceid": "8fa0a611-f7a8-44d4-8921-988332d441bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1060.863030] env[65758]: DEBUG oslo_concurrency.lockutils [None req-0aa75fb5-12f3-439d-a93e-eacdf40a62e7 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.225s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.863548] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-f6931aa0-2403-4052-97bb-c06158af9887" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.864088] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-f6931aa0-2403-4052-97bb-c06158af9887" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.864653] env[65758]: DEBUG nova.objects.instance [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'flavor' on Instance uuid 5fc4f1b8-9024-4155-b56d-56a8d08f0259 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.866860] env[65758]: DEBUG oslo_concurrency.lockutils [None req-784d2199-7b95-4f9c-9087-8162de438979 
tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.530s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.867746] env[65758]: DEBUG oslo_concurrency.lockutils [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.868113] env[65758]: DEBUG oslo_concurrency.lockutils [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.868443] env[65758]: DEBUG oslo_concurrency.lockutils [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.871117] env[65758]: INFO nova.compute.manager [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Terminating instance [ 1060.904978] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661090, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.915334] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661089, 'name': Rename_Task, 'duration_secs': 0.243689} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.919073] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1060.919412] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c998d26-6e72-4c62-af8d-6f35500e2d10 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.928865] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661091, 'name': Destroy_Task, 'duration_secs': 0.446954} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.933318] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Destroyed the VM [ 1060.933318] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1060.933318] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1060.933318] env[65758]: value = "task-4661092" [ 1060.933318] env[65758]: _type = "Task" [ 1060.933318] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.933318] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-37ecbc1f-fc31-4021-ade2-44f3a286a0a6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.943071] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.944849] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1060.944849] env[65758]: value = "task-4661093" [ 1060.944849] env[65758]: _type = "Task" [ 1060.944849] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.954830] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661093, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.319743] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Releasing lock "refresh_cache-a014debf-2f16-4b30-af78-27a6751060de" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.319933] env[65758]: DEBUG nova.compute.manager [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received event network-vif-plugged-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1061.320211] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Acquiring lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.320475] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.320617] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.320852] env[65758]: DEBUG nova.compute.manager [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] No waiting events found dispatching network-vif-plugged-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1061.320988] env[65758]: WARNING nova.compute.manager [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received unexpected event network-vif-plugged-83c394c9-9b0d-40ad-923c-00e70d63c85a for instance with vm_state shelved_offloaded and task_state spawning. 
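The Acquiring / acquired / "released" lines throughout this span are emitted by oslo.concurrency's lockutils as code enters and leaves named locks. A minimal sketch of the two forms that produce these messages is below; the lock names are hypothetical stand-ins for the ones in the log (for example "refresh_cache-<uuid>" or "compute_resources").

# Sketch of the lockutils usage behind the lock log lines above.
from oslo_concurrency import lockutils

# Context-manager form: produces the "Acquiring lock" / "Acquired lock" /
# "Releasing lock" DEBUG lines (lockutils.py:313/316/334 in the log above).
with lockutils.lock('refresh_cache-<instance-uuid>'):
    # critical section, e.g. refreshing an instance network info cache
    pass

# Decorator form: produces the 'acquired by "..." :: waited' and
# '"released" by "..." :: held' lines (lockutils.py:405/410/424 above).
@lockutils.synchronized('compute_resources')
def update_usage():
    # held while resource usage is updated
    pass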
[ 1061.321209] env[65758]: DEBUG nova.compute.manager [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received event network-changed-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1061.321426] env[65758]: DEBUG nova.compute.manager [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Refreshing instance network info cache due to event network-changed-83c394c9-9b0d-40ad-923c-00e70d63c85a. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1061.321613] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Acquiring lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.321744] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Acquired lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.321993] env[65758]: DEBUG nova.network.neutron [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Refreshing network info cache for port 83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1061.375042] env[65758]: WARNING neutronclient.v2_0.client [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1061.375817] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1061.376399] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1061.385681] env[65758]: DEBUG nova.compute.manager [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1061.386079] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1061.386466] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5bce399-be65-4e2f-92a8-5f63840fcfc8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.403669] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375727b1-6c07-4a85-9fed-8885b13762be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.419610] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661090, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.451843] env[65758]: WARNING nova.virt.vmwareapi.vmops [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 89167b37-4c21-4678-a0f0-5a4ce932c4d8 could not be found. [ 1061.452115] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1061.452314] env[65758]: INFO nova.compute.manager [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Took 0.07 seconds to destroy the instance on the hypervisor. [ 1061.452668] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1061.456662] env[65758]: DEBUG nova.compute.manager [-] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1061.456772] env[65758]: DEBUG nova.network.neutron [-] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1061.457043] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1061.457615] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1061.457930] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1061.474402] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661092, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.480911] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661093, 'name': RemoveSnapshot_Task, 'duration_secs': 0.525507} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.481257] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1061.481554] env[65758]: DEBUG nova.compute.manager [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1061.482417] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51b0861-6d05-4d4c-804e-a0dc87aa8b9e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.505588] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1061.522987] env[65758]: DEBUG nova.objects.instance [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'pci_requests' on Instance uuid 5fc4f1b8-9024-4155-b56d-56a8d08f0259 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.569101] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "e6159a35-f073-4931-b0b0-832a88680356" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.569390] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.570186] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "e6159a35-f073-4931-b0b0-832a88680356-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.570397] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.570577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.577354] env[65758]: INFO nova.compute.manager [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Terminating instance [ 1061.825105] env[65758]: WARNING neutronclient.v2_0.client [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
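(Illustrative sketch, not part of the captured log.) The "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG lines above for "e6159a35-...-do_terminate_instance" are emitted by oslo.concurrency's synchronized wrapper. A minimal Python sketch of that pattern, with hypothetical names, looks like:

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # lockutils.synchronized wraps the callable in the "inner" helper that
        # logs the Acquiring/acquired/released lines with wait and hold times.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # Hypothetical critical section: only one caller per instance UUID
            # executes this at a time within the process.
            pass

        do_terminate_instance()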
[ 1061.826018] env[65758]: WARNING openstack [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1061.826526] env[65758]: WARNING openstack [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1061.905223] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661090, 'name': CreateVM_Task, 'duration_secs': 1.380113} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.905450] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.906876] env[65758]: WARNING neutronclient.v2_0.client [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1061.906876] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.906876] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "[datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.907179] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1061.907817] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44084de7-c475-4000-967f-75964845f0da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.914074] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1061.914074] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b070db-c17e-e009-09ec-264ceb0dc572" [ 1061.914074] env[65758]: _type = "Task" 
[ 1061.914074] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.925646] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b070db-c17e-e009-09ec-264ceb0dc572, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.944470] env[65758]: DEBUG oslo_vmware.api [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661092, 'name': PowerOnVM_Task, 'duration_secs': 0.563011} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.944772] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1061.944950] env[65758]: INFO nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Took 9.57 seconds to spawn the instance on the hypervisor. [ 1061.945168] env[65758]: DEBUG nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1061.945988] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280ce55c-eac0-4691-8fec-651075965c9f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.998408] env[65758]: INFO nova.compute.manager [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Shelve offloading [ 1062.020650] env[65758]: WARNING neutronclient.v2_0.client [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
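(Illustrative sketch, not part of the captured log.) The "Waiting for the task", "progress is N%" and "completed successfully" lines above come from oslo.vmware's task helpers. A minimal sketch of that call pattern, assuming a session built like the one created at service startup and a placeholder VM reference:

    from oslo_vmware import api

    # Placeholder host and credentials; a real deployment supplies these from config.
    session = api.VMwareAPISession('vcenter.example.test', 'user', 'password',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # placeholder: a VirtualMachine managed object reference

    # Invoke an asynchronous vSphere call; it returns a Task managed object.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task (producing the "_poll_task ... progress is N%"
    # DEBUG lines seen above) and returns the task info on success.
    task_info = session.wait_for_task(task)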
[ 1062.021028] env[65758]: WARNING openstack [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1062.021517] env[65758]: WARNING openstack [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1062.031999] env[65758]: DEBUG nova.objects.base [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Object Instance<5fc4f1b8-9024-4155-b56d-56a8d08f0259> lazy-loaded attributes: flavor,pci_requests {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1062.032236] env[65758]: DEBUG nova.network.neutron [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1062.032532] env[65758]: WARNING neutronclient.v2_0.client [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1062.032872] env[65758]: WARNING neutronclient.v2_0.client [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1062.033460] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1062.033840] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1062.082909] env[65758]: DEBUG nova.compute.manager [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1062.083750] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1062.084434] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c987d9b-82a0-471f-8aff-068c9b995938 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.093616] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1062.093919] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1907bc57-05aa-486e-9c8f-bb4eb2d90066 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.101056] env[65758]: DEBUG oslo_vmware.api [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1062.101056] env[65758]: value = "task-4661094" [ 1062.101056] env[65758]: _type = "Task" [ 1062.101056] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.116450] env[65758]: DEBUG oslo_vmware.api [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661094, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.133845] env[65758]: DEBUG nova.policy [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1062.151510] env[65758]: DEBUG nova.network.neutron [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updated VIF entry in instance network info cache for port 83c394c9-9b0d-40ad-923c-00e70d63c85a. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1062.151870] env[65758]: DEBUG nova.network.neutron [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [{"id": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "address": "fa:16:3e:01:98:57", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c394c9-9b", "ovs_interfaceid": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1062.277675] env[65758]: DEBUG nova.network.neutron [-] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1062.430506] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "[datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.430818] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Processing image df203c52-cb8e-4277-903c-c114ae8627be {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.430974] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be/df203c52-cb8e-4277-903c-c114ae8627be.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.431145] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "[datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be/df203c52-cb8e-4277-903c-c114ae8627be.vmdk" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.431327] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.431976] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-431fb640-061d-403f-ab86-2ce929d88d8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.439279] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52960262-0d04-f3e1-6f30-aa38bc124116/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1062.440339] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66993170-dcfd-4420-95a3-a0b8fe964184 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.444493] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.444690] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1062.445806] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e84d3b4a-06e0-4f86-876e-62cdde9d38fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.450312] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52960262-0d04-f3e1-6f30-aa38bc124116/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1062.450552] env[65758]: ERROR oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52960262-0d04-f3e1-6f30-aa38bc124116/disk-0.vmdk due to incomplete transfer. 
[ 1062.451267] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f30f5b82-1067-4a3f-b64f-61e4108ba55b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.455235] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1062.455235] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5240eb94-5989-1cfe-ef53-1791b0e97649" [ 1062.455235] env[65758]: _type = "Task" [ 1062.455235] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.467582] env[65758]: INFO nova.compute.manager [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Took 14.81 seconds to build instance. [ 1062.475000] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Preparing fetch location {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1062.475000] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Fetch image to [datastore2] OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498/OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498.vmdk {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1062.475493] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Downloading stream optimized image df203c52-cb8e-4277-903c-c114ae8627be to [datastore2] OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498/OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498.vmdk on the data store datastore2 as vApp {{(pid=65758) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1062.475750] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Downloading image file data df203c52-cb8e-4277-903c-c114ae8627be to the ESX as VM named 'OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498' {{(pid=65758) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1062.477997] env[65758]: DEBUG oslo_vmware.rw_handles [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52960262-0d04-f3e1-6f30-aa38bc124116/disk-0.vmdk. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1062.478202] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Uploaded image 109273e3-b0b7-4090-8a28-89c1405c1b9c to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1062.480401] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1062.480972] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-19684079-1f57-4fa2-8a7b-0f6a3943d5ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.490687] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1062.490687] env[65758]: value = "task-4661095" [ 1062.490687] env[65758]: _type = "Task" [ 1062.490687] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.504216] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1062.504548] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661095, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.523043] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c1d0816-5040-45eb-9907-4a7f258985a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.530958] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1062.530958] env[65758]: value = "task-4661096" [ 1062.530958] env[65758]: _type = "Task" [ 1062.530958] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.542417] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1062.542671] env[65758]: DEBUG nova.compute.manager [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1062.544014] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d1b674-be76-4901-b45b-2833325f9dd3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.550863] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.551099] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.551294] env[65758]: DEBUG nova.network.neutron [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1062.572811] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1062.572811] env[65758]: value = "resgroup-9" [ 1062.572811] env[65758]: _type = "ResourcePool" [ 1062.572811] env[65758]: }. 
{{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1062.572811] env[65758]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-29c42ab1-c007-4f84-8265-6f4afac68abb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.594891] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lease: (returnval){ [ 1062.594891] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f810f6-bec8-9664-b054-3e4831fab79a" [ 1062.594891] env[65758]: _type = "HttpNfcLease" [ 1062.594891] env[65758]: } obtained for vApp import into resource pool (val){ [ 1062.594891] env[65758]: value = "resgroup-9" [ 1062.594891] env[65758]: _type = "ResourcePool" [ 1062.594891] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1062.595216] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the lease: (returnval){ [ 1062.595216] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f810f6-bec8-9664-b054-3e4831fab79a" [ 1062.595216] env[65758]: _type = "HttpNfcLease" [ 1062.595216] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1062.606583] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1062.606583] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f810f6-bec8-9664-b054-3e4831fab79a" [ 1062.606583] env[65758]: _type = "HttpNfcLease" [ 1062.606583] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1062.612204] env[65758]: DEBUG oslo_vmware.api [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661094, 'name': PowerOffVM_Task, 'duration_secs': 0.263587} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.612506] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.612621] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1062.612882] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fd618b1-d0bd-442a-8345-8868d548e274 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.655472] env[65758]: DEBUG oslo_concurrency.lockutils [req-0f7558c3-266e-46f2-b4e0-dbedf404751e req-65e117a3-fe85-49a7-a7e6-bee8d133d504 service nova] Releasing lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.687929] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1062.688177] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1062.688282] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleting the datastore file [datastore2] e6159a35-f073-4931-b0b0-832a88680356 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1062.689033] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29f7f7fb-4289-4dbd-a27d-434d28dc9abb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.697248] env[65758]: DEBUG oslo_vmware.api [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1062.697248] env[65758]: value = "task-4661099" [ 1062.697248] env[65758]: _type = "Task" [ 1062.697248] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.706979] env[65758]: DEBUG oslo_vmware.api [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661099, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.781137] env[65758]: INFO nova.compute.manager [-] [instance: 89167b37-4c21-4678-a0f0-5a4ce932c4d8] Took 1.32 seconds to deallocate network for instance. [ 1062.969800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-056721af-f399-4124-a9ac-e2b06e51e548 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "a014debf-2f16-4b30-af78-27a6751060de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.325s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.003071] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661095, 'name': Destroy_Task, 'duration_secs': 0.380395} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.003200] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Destroyed the VM [ 1063.003453] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1063.004140] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e468e5e0-71a6-43c2-9979-b8a5d636a4d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.011563] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1063.011563] env[65758]: value = "task-4661100" [ 1063.011563] env[65758]: _type = "Task" [ 1063.011563] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.021254] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661100, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.054351] env[65758]: WARNING neutronclient.v2_0.client [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1063.055144] env[65758]: WARNING openstack [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1063.055496] env[65758]: WARNING openstack [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1063.104856] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1063.104856] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f810f6-bec8-9664-b054-3e4831fab79a" [ 1063.104856] env[65758]: _type = "HttpNfcLease" [ 1063.104856] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1063.208855] env[65758]: DEBUG oslo_vmware.api [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198553} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.209253] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1063.209543] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1063.209823] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1063.210112] env[65758]: INFO nova.compute.manager [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: e6159a35-f073-4931-b0b0-832a88680356] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1063.210483] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1063.216377] env[65758]: DEBUG nova.compute.manager [-] [instance: e6159a35-f073-4931-b0b0-832a88680356] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1063.216462] env[65758]: DEBUG nova.network.neutron [-] [instance: e6159a35-f073-4931-b0b0-832a88680356] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1063.216706] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1063.217240] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1063.217497] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1063.255212] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1063.522731] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661100, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.604656] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1063.604656] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f810f6-bec8-9664-b054-3e4831fab79a" [ 1063.604656] env[65758]: _type = "HttpNfcLease" [ 1063.604656] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1063.605131] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1063.605131] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f810f6-bec8-9664-b054-3e4831fab79a" [ 1063.605131] env[65758]: _type = "HttpNfcLease" [ 1063.605131] env[65758]: }. 
{{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1063.605919] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2692764-583a-4a3e-80e1-4da9c6b99ea7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.614841] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642039-9aef-4b09-2f91-49c63386c18d/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1063.614947] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642039-9aef-4b09-2f91-49c63386c18d/disk-0.vmdk. {{(pid=65758) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1063.681385] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7f7e8802-75ff-4860-9ad5-eeb98bdfd277 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.690269] env[65758]: DEBUG nova.network.neutron [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Successfully updated port: f6931aa0-2403-4052-97bb-c06158af9887 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1063.766337] env[65758]: WARNING neutronclient.v2_0.client [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
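(Illustrative sketch, not part of the captured log.) The HttpNfcLease sequence above (lease requested via ImportVApp, "is initializing", "is ready", VMDK URL extracted for the write connection) maps onto a short oslo.vmware call chain. A sketch under the assumption that the resource pool, folder and import spec have already been built by the caller:

    from oslo_vmware import api

    # Placeholder session and objects; Nova assembles the real ones elsewhere.
    session = api.VMwareAPISession('vcenter.example.test', 'user', 'password', 10, 0.5)
    res_pool_ref = ...    # e.g. the "resgroup-9" ResourcePool seen in the log
    vm_folder_ref = ...
    vm_import_spec = ...

    # Request an import lease from vCenter; this is the ImportVApp invocation logged above.
    lease = session.invoke_api(session.vim, 'ImportVApp', res_pool_ref,
                               folder=vm_folder_ref, spec=vm_import_spec)

    # Poll until the lease leaves the "initializing" state; the "is initializing" /
    # "is ready" lines come from this helper. Once ready, the lease info carries the
    # HTTPS device URL used to stream the VMDK to the ESX host.
    session.wait_for_lease_ready(lease)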
[ 1063.768930] env[65758]: WARNING openstack [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1063.768930] env[65758]: WARNING openstack [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1063.777709] env[65758]: DEBUG nova.network.neutron [-] [instance: e6159a35-f073-4931-b0b0-832a88680356] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1063.817000] env[65758]: DEBUG oslo_concurrency.lockutils [None req-784d2199-7b95-4f9c-9087-8162de438979 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "89167b37-4c21-4678-a0f0-5a4ce932c4d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.950s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.916183] env[65758]: DEBUG nova.network.neutron [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [{"id": "2adc4687-14f6-4742-8afd-a86473befd61", "address": "fa:16:3e:63:9e:d9", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc4687-14", "ovs_interfaceid": "2adc4687-14f6-4742-8afd-a86473befd61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1063.933351] env[65758]: DEBUG nova.compute.manager [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: 
a014debf-2f16-4b30-af78-27a6751060de] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1063.939632] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbc0587-dac7-4bf0-90c5-c5651bea4e6d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.026313] env[65758]: DEBUG nova.compute.manager [req-ade5f98b-2837-4cf8-87ff-38d3ba0195ec req-b3801fe0-14d9-417b-a05d-fbf863a0a609 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-vif-plugged-f6931aa0-2403-4052-97bb-c06158af9887 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1064.026612] env[65758]: DEBUG oslo_concurrency.lockutils [req-ade5f98b-2837-4cf8-87ff-38d3ba0195ec req-b3801fe0-14d9-417b-a05d-fbf863a0a609 service nova] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.027203] env[65758]: DEBUG oslo_concurrency.lockutils [req-ade5f98b-2837-4cf8-87ff-38d3ba0195ec req-b3801fe0-14d9-417b-a05d-fbf863a0a609 service nova] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.027203] env[65758]: DEBUG oslo_concurrency.lockutils [req-ade5f98b-2837-4cf8-87ff-38d3ba0195ec req-b3801fe0-14d9-417b-a05d-fbf863a0a609 service nova] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.027387] env[65758]: DEBUG nova.compute.manager [req-ade5f98b-2837-4cf8-87ff-38d3ba0195ec req-b3801fe0-14d9-417b-a05d-fbf863a0a609 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] No waiting events found dispatching network-vif-plugged-f6931aa0-2403-4052-97bb-c06158af9887 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1064.027530] env[65758]: WARNING nova.compute.manager [req-ade5f98b-2837-4cf8-87ff-38d3ba0195ec req-b3801fe0-14d9-417b-a05d-fbf863a0a609 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received unexpected event network-vif-plugged-f6931aa0-2403-4052-97bb-c06158af9887 for instance with vm_state active and task_state None. [ 1064.038023] env[65758]: DEBUG oslo_vmware.api [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661100, 'name': RemoveSnapshot_Task, 'duration_secs': 0.658546} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.038023] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1064.038023] env[65758]: INFO nova.compute.manager [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Took 14.43 seconds to snapshot the instance on the hypervisor. [ 1064.050296] env[65758]: DEBUG nova.compute.manager [req-486ca21d-2319-40a1-b044-53d8b83e2a34 req-fa758cbf-f1e2-473a-9920-4a96fe52ba52 service nova] [instance: e6159a35-f073-4931-b0b0-832a88680356] Received event network-vif-deleted-b0b8d361-f2b6-4d7d-b483-cfc0d9dc2c89 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1064.195806] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.196157] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.196372] env[65758]: DEBUG nova.network.neutron [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1064.284587] env[65758]: INFO nova.compute.manager [-] [instance: e6159a35-f073-4931-b0b0-832a88680356] Took 1.07 seconds to deallocate network for instance. [ 1064.422644] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.423106] env[65758]: WARNING neutronclient.v2_0.client [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1064.423824] env[65758]: WARNING openstack [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1064.424314] env[65758]: WARNING openstack [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1064.429949] env[65758]: WARNING neutronclient.v2_0.client [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1064.461742] env[65758]: INFO nova.compute.manager [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] instance snapshotting [ 1064.467422] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d186e8-9010-4416-bf9a-5b8da728ea33 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.493265] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84afb8f-d30d-4808-9659-3fb9e263353a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.597480] env[65758]: DEBUG nova.compute.manager [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Found 3 images (rotation: 2) {{(pid=65758) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5057}} [ 1064.597480] env[65758]: DEBUG nova.compute.manager [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Rotating out 1 backups {{(pid=65758) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5065}} [ 1064.597480] env[65758]: DEBUG nova.compute.manager [None req-5d183493-1c05-49d9-b79c-377dbfe75ac5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleting image fcf58575-c665-48d7-add3-26ecbec71675 {{(pid=65758) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5070}} [ 1064.700027] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 
'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1064.700425] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1064.765520] env[65758]: WARNING nova.network.neutron [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] 2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4 already exists in list: networks containing: ['2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4']. ignoring it [ 1064.765520] env[65758]: WARNING nova.network.neutron [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] 2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4 already exists in list: networks containing: ['2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4']. ignoring it [ 1064.792679] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.793024] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.793297] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.817882] env[65758]: INFO nova.scheduler.client.report [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted allocations for instance e6159a35-f073-4931-b0b0-832a88680356 [ 1064.850412] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Completed reading data from the image iterator. 
{{(pid=65758) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1064.850820] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642039-9aef-4b09-2f91-49c63386c18d/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1064.851903] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3061651-460d-4a36-bda6-b2c524e97e36 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.861843] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642039-9aef-4b09-2f91-49c63386c18d/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1064.862192] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642039-9aef-4b09-2f91-49c63386c18d/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1064.862556] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d7558626-57cb-4bcf-b8b4-be0abd2951b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.906178] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.907639] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05538b8b-82a0-4123-a857-a0399deee098 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.919078] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.919078] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-394232c9-42d4-4f42-850c-44cf342a1da6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.948807] env[65758]: WARNING neutronclient.v2_0.client [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a 
future release. [ 1064.949568] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1064.949993] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1065.004127] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.004434] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.004663] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleting the datastore file [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.004989] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af744c0e-a214-4582-a8ce-6be6666955a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.008824] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1065.008922] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-598d97ff-622c-47fb-91f5-a3e4dbadcca0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.017925] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1065.017925] env[65758]: value = "task-4661103" [ 1065.017925] env[65758]: _type = "Task" [ 1065.017925] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.022766] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1065.022766] env[65758]: value = "task-4661104" [ 1065.022766] env[65758]: _type = "Task" [ 1065.022766] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.032472] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.039031] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661104, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.080861] env[65758]: DEBUG oslo_vmware.rw_handles [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642039-9aef-4b09-2f91-49c63386c18d/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1065.081150] env[65758]: INFO nova.virt.vmwareapi.images [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Downloaded image file data df203c52-cb8e-4277-903c-c114ae8627be [ 1065.082801] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f18c67a-ddb1-4ec0-801e-23a824340be0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.105227] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-078c8c11-6338-406a-a63a-4c3f64545a9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.137953] env[65758]: INFO nova.virt.vmwareapi.images [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] The imported VM was unregistered [ 1065.140744] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Caching image {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1065.141176] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1065.141803] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fb113ec-f609-4312-ac64-85fb7f61e545 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.170246] env[65758]: WARNING neutronclient.v2_0.client [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1065.171080] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1065.171465] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1065.185384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.185687] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.187069] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Created directory with path [datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1065.187230] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498/OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498.vmdk to [datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be/df203c52-cb8e-4277-903c-c114ae8627be.vmdk. 
{{(pid=65758) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1065.187845] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-637f14f0-3c9c-4b2f-99cb-f9b97481f5dd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.198041] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1065.198041] env[65758]: value = "task-4661106" [ 1065.198041] env[65758]: _type = "Task" [ 1065.198041] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.209062] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661106, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.329439] env[65758]: DEBUG oslo_concurrency.lockutils [None req-771f7391-cf24-44ef-ad19-f8fd35472455 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "e6159a35-f073-4931-b0b0-832a88680356" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.760s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.378096] env[65758]: WARNING neutronclient.v2_0.client [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1065.378822] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1065.379585] env[65758]: WARNING openstack [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1065.476857] env[65758]: DEBUG nova.network.neutron [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "608946a3-79b3-484c-b023-da1a84676162", "address": "fa:16:3e:c8:f2:1d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap608946a3-79", "ovs_interfaceid": "608946a3-79b3-484c-b023-da1a84676162", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f6931aa0-2403-4052-97bb-c06158af9887", "address": "fa:16:3e:5a:e4:1c", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6931aa0-24", "ovs_interfaceid": "f6931aa0-2403-4052-97bb-c06158af9887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1065.536053] env[65758]: DEBUG oslo_vmware.api [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.313465} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.540754] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.541097] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.541388] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.544558] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661104, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.570845] env[65758]: INFO nova.scheduler.client.report [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted allocations for instance afc1eb16-c275-4b3b-a7fe-9938d2241e24 [ 1065.691762] env[65758]: DEBUG nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1065.709404] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661106, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.981165] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.981606] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.981771] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.982809] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64de4e11-73f7-454a-94e3-d2e5fafc9815 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.002602] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1066.002888] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 
tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1066.003056] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1066.003614] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1066.003614] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1066.003614] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1066.003825] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1066.003903] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1066.004093] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1066.004272] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1066.004475] env[65758]: DEBUG nova.virt.hardware [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1066.011385] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Reconfiguring VM to attach interface 
{{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1066.012267] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efebef25-041c-4f7f-bcc1-80b0bcc728dd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.034597] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661104, 'name': CreateSnapshot_Task, 'duration_secs': 0.788097} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.036225] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1066.036589] env[65758]: DEBUG oslo_vmware.api [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1066.036589] env[65758]: value = "task-4661108" [ 1066.036589] env[65758]: _type = "Task" [ 1066.036589] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.037359] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a40473-da9d-42ff-acc6-b1a8dbc5adf2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.055331] env[65758]: DEBUG oslo_vmware.api [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661108, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.076140] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.077032] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.077032] env[65758]: DEBUG nova.objects.instance [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'resources' on Instance uuid afc1eb16-c275-4b3b-a7fe-9938d2241e24 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.216735] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661106, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.223008] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.255690] env[65758]: DEBUG nova.compute.manager [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-changed-f6931aa0-2403-4052-97bb-c06158af9887 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1066.255905] env[65758]: DEBUG nova.compute.manager [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Refreshing instance network info cache due to event network-changed-f6931aa0-2403-4052-97bb-c06158af9887. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1066.256463] env[65758]: DEBUG oslo_concurrency.lockutils [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] Acquiring lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.256622] env[65758]: DEBUG oslo_concurrency.lockutils [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] Acquired lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.256832] env[65758]: DEBUG nova.network.neutron [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Refreshing network info cache for port f6931aa0-2403-4052-97bb-c06158af9887 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1066.277415] env[65758]: DEBUG nova.compute.manager [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received event network-vif-unplugged-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1066.277652] env[65758]: DEBUG oslo_concurrency.lockutils [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.277863] env[65758]: DEBUG oslo_concurrency.lockutils [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.278297] env[65758]: DEBUG oslo_concurrency.lockutils [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.278670] env[65758]: DEBUG nova.compute.manager [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] No waiting events found dispatching network-vif-unplugged-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1066.279061] env[65758]: WARNING nova.compute.manager [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received unexpected event network-vif-unplugged-2adc4687-14f6-4742-8afd-a86473befd61 for instance with vm_state shelved_offloaded and task_state None. 
[ 1066.279061] env[65758]: DEBUG nova.compute.manager [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received event network-changed-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1066.279293] env[65758]: DEBUG nova.compute.manager [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Refreshing instance network info cache due to event network-changed-2adc4687-14f6-4742-8afd-a86473befd61. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1066.279600] env[65758]: DEBUG oslo_concurrency.lockutils [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Acquiring lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.279768] env[65758]: DEBUG oslo_concurrency.lockutils [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Acquired lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.279929] env[65758]: DEBUG nova.network.neutron [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Refreshing network info cache for port 2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1066.356398] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.356646] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.553218] env[65758]: DEBUG oslo_vmware.api [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661108, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.564479] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1066.564846] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1161d0f0-12e4-4488-9abd-524c3b71686b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.576239] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1066.576239] env[65758]: value = "task-4661109" [ 1066.576239] env[65758]: _type = "Task" [ 1066.576239] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.580964] env[65758]: DEBUG nova.objects.instance [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'numa_topology' on Instance uuid afc1eb16-c275-4b3b-a7fe-9938d2241e24 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.592933] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661109, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.713757] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661106, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.760210] env[65758]: WARNING neutronclient.v2_0.client [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1066.761160] env[65758]: WARNING openstack [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1066.761535] env[65758]: WARNING openstack [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1066.783326] env[65758]: WARNING neutronclient.v2_0.client [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1066.784334] env[65758]: WARNING openstack [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1066.784826] env[65758]: WARNING openstack [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1066.859476] env[65758]: DEBUG nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1067.004049] env[65758]: WARNING neutronclient.v2_0.client [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1067.005010] env[65758]: WARNING openstack [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.005585] env[65758]: WARNING openstack [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.016617] env[65758]: WARNING neutronclient.v2_0.client [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1067.017398] env[65758]: WARNING openstack [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1067.018033] env[65758]: WARNING openstack [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1067.054435] env[65758]: DEBUG oslo_vmware.api [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661108, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.084199] env[65758]: DEBUG nova.objects.base [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1067.094227] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661109, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.138217] env[65758]: DEBUG nova.network.neutron [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updated VIF entry in instance network info cache for port f6931aa0-2403-4052-97bb-c06158af9887. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1067.138217] env[65758]: DEBUG nova.network.neutron [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "608946a3-79b3-484c-b023-da1a84676162", "address": "fa:16:3e:c8:f2:1d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap608946a3-79", "ovs_interfaceid": "608946a3-79b3-484c-b023-da1a84676162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f6931aa0-2403-4052-97bb-c06158af9887", "address": "fa:16:3e:5a:e4:1c", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6931aa0-24", "ovs_interfaceid": "f6931aa0-2403-4052-97bb-c06158af9887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1067.152362] env[65758]: DEBUG nova.network.neutron [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updated VIF entry in instance network info cache for port 2adc4687-14f6-4742-8afd-a86473befd61. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1067.155055] env[65758]: DEBUG nova.network.neutron [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [{"id": "2adc4687-14f6-4742-8afd-a86473befd61", "address": "fa:16:3e:63:9e:d9", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2adc4687-14", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1067.213899] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661106, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.229443] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.229708] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.229896] env[65758]: DEBUG nova.compute.manager [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1067.231088] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6299193-33f1-4471-b15d-28692db82d49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.240629] env[65758]: DEBUG nova.compute.manager [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1067.241568] env[65758]: DEBUG nova.objects.instance [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'flavor' on Instance uuid 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.318364] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daaef55-98af-4cc0-8ea5-6a92e58ca231 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.328123] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30eaab78-877b-4531-adb0-959c61469834 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.367875] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aab946c-faf5-462c-999e-d405928497b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.381248] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c44823-daa5-49a4-b6cc-bddb01609ca2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.389384] env[65758]: 
DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.403615] env[65758]: DEBUG nova.compute.provider_tree [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.554977] env[65758]: DEBUG oslo_vmware.api [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661108, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.590853] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661109, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.642823] env[65758]: DEBUG oslo_concurrency.lockutils [req-31a2ab00-2d49-4f05-856a-a44be7550537 req-a80efb86-51bc-4c51-a208-09942edf7ce2 service nova] Releasing lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.660027] env[65758]: DEBUG oslo_concurrency.lockutils [req-94a9df09-0e17-4ac4-8700-a667a48519e0 req-eedfc0f9-504d-4d4e-833d-730838ce8e7d service nova] Releasing lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.715917] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661106, 'name': MoveVirtualDisk_Task} progress is 94%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.907104] env[65758]: DEBUG nova.scheduler.client.report [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.053639] env[65758]: DEBUG oslo_vmware.api [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661108, 'name': ReconfigVM_Task, 'duration_secs': 1.614754} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.054040] env[65758]: WARNING neutronclient.v2_0.client [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1068.054295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.054533] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Reconfigured VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1068.091636] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661109, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.149972] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.215551] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661106, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.739149} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.215862] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498/OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498.vmdk to [datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be/df203c52-cb8e-4277-903c-c114ae8627be.vmdk. [ 1068.216064] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Cleaning up location [datastore2] OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498 {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1068.216463] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_d75aad99-91f5-4865-aa30-1a126e291498 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.216559] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9e5aac4-2169-4ef4-a7ca-d4176e745013 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.224788] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1068.224788] env[65758]: value = "task-4661111" [ 1068.224788] env[65758]: _type = "Task" [ 1068.224788] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.235348] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661111, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.253290] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1068.253649] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a674b255-4711-4209-99ef-65fff39c42bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.263674] env[65758]: DEBUG oslo_vmware.api [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1068.263674] env[65758]: value = "task-4661112" [ 1068.263674] env[65758]: _type = "Task" [ 1068.263674] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.274422] env[65758]: DEBUG oslo_vmware.api [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661112, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.413484] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.337s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.416715] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.194s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.418221] env[65758]: INFO nova.compute.claims [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1068.560007] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9388c1e0-552a-4adc-a729-8d11c7143e31 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-f6931aa0-2403-4052-97bb-c06158af9887" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.696s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.597556] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661109, 'name': CloneVM_Task} progress is 95%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.737141] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040274} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.738457] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.738457] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "[datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be/df203c52-cb8e-4277-903c-c114ae8627be.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.738457] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be/df203c52-cb8e-4277-903c-c114ae8627be.vmdk to [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50/63b744d2-541a-42e3-9717-b06a4459fd50.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1068.738457] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc16c6cc-78c5-4248-8e41-3e7d9747769e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.749504] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1068.749504] env[65758]: value = "task-4661113" [ 1068.749504] env[65758]: _type = "Task" [ 1068.749504] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.760572] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661113, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.775327] env[65758]: DEBUG oslo_vmware.api [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661112, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.930377] env[65758]: DEBUG oslo_concurrency.lockutils [None req-773c93c8-077f-4a39-8abc-e5b8f9a69733 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.342s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.931790] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.782s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.932122] env[65758]: INFO nova.compute.manager [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Unshelving [ 1069.091825] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661109, 'name': CloneVM_Task, 'duration_secs': 2.434298} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.092188] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Created linked-clone VM from snapshot [ 1069.092997] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915a9d77-bbc0-4c11-a06e-875d4ee3c370 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.103883] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Uploading image 4716357a-6eaf-4452-a4eb-55c3224666a3 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1069.133649] env[65758]: DEBUG oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1069.133649] env[65758]: value = "vm-910040" [ 1069.133649] env[65758]: _type = "VirtualMachine" [ 1069.133649] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1069.133997] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b1113e99-905e-497a-9692-3bc8ba7d4ba7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.145080] env[65758]: DEBUG oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lease: (returnval){ [ 1069.145080] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528d9a72-1a95-7709-9622-b457e67cce57" [ 1069.145080] env[65758]: _type = "HttpNfcLease" [ 1069.145080] env[65758]: } obtained for exporting VM: (result){ [ 1069.145080] env[65758]: value = "vm-910040" [ 1069.145080] env[65758]: _type = "VirtualMachine" [ 1069.145080] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1069.145417] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the lease: (returnval){ [ 1069.145417] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528d9a72-1a95-7709-9622-b457e67cce57" [ 1069.145417] env[65758]: _type = "HttpNfcLease" [ 1069.145417] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1069.153651] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1069.153651] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528d9a72-1a95-7709-9622-b457e67cce57" [ 1069.153651] env[65758]: _type = "HttpNfcLease" [ 1069.153651] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1069.261953] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661113, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.275383] env[65758]: DEBUG oslo_vmware.api [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661112, 'name': PowerOffVM_Task, 'duration_secs': 0.531098} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.275680] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1069.275905] env[65758]: DEBUG nova.compute.manager [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1069.276853] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bae596-80c8-430b-b9ce-22d4aa4e526b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.655218] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1069.655218] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528d9a72-1a95-7709-9622-b457e67cce57" [ 1069.655218] env[65758]: _type = "HttpNfcLease" [ 1069.655218] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1069.658663] env[65758]: DEBUG oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1069.658663] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528d9a72-1a95-7709-9622-b457e67cce57" [ 1069.658663] env[65758]: _type = "HttpNfcLease" [ 1069.658663] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1069.659710] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9996f22a-ca3e-4fb2-b39a-05d2868e0953 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.670284] env[65758]: DEBUG oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5204aa4b-c2f0-f0b1-0805-7e567a0b04aa/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1069.670508] env[65758]: DEBUG oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5204aa4b-c2f0-f0b1-0805-7e567a0b04aa/disk-0.vmdk for reading. 
{{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1069.673631] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d00197-ef3e-4d4f-a524-cac4fe7c9fbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.747073] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77bd24a-7f87-457a-b398-cb084fdfe43f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.762304] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661113, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.795039] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152436db-4501-4049-836f-f4597213a14f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.802040] env[65758]: DEBUG oslo_concurrency.lockutils [None req-62a07b63-2017-4dee-949d-fbc30f71760f tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.572s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.812940] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef4761d-9c6f-426c-a381-310e46c709e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.833360] env[65758]: DEBUG nova.compute.provider_tree [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.838263] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cdafb037-49e5-4efd-802a-03eded0e5f55 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.945670] env[65758]: DEBUG nova.compute.utils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1070.263312] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661113, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.307119] env[65758]: INFO nova.compute.manager [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Rebuilding instance [ 1070.339060] env[65758]: DEBUG nova.scheduler.client.report [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.375472] env[65758]: DEBUG nova.compute.manager [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1070.376951] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6619e87a-815d-41fe-a507-f6b89a35e8f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.451508] env[65758]: INFO nova.virt.block_device [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Booting with volume 584fc235-4162-403c-abe8-2188f52e0331 at /dev/sdb [ 1070.493973] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e42e1066-5adc-49af-845c-3a6923dc426a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.506066] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a3cdd1-c98e-43c1-ab99-b11bc239f5ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.550610] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1c08a6e-b70a-4a45-b115-52b8c1b88ee8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.564131] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf290873-ff19-4d37-8be3-372b61c4fe4f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.590031] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-608946a3-79b3-484c-b023-da1a84676162" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.590689] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-608946a3-79b3-484c-b023-da1a84676162" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.612372] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74de861-4553-4985-82f1-a5ccfcc57a91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.625987] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce674f06-9470-4aed-a971-ddb04ea510c8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.645248] env[65758]: DEBUG nova.virt.block_device [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating existing volume attachment record: 964bf65d-b6cc-4088-9faa-62a2df588a59 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1070.764845] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661113, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.851057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.851643] env[65758]: DEBUG nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1070.855340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.466s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.856803] env[65758]: INFO nova.compute.claims [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1071.097290] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.097612] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.098709] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f66d579-91c7-49e7-a5c0-46079dea1945 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.121583] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3523e3b5-af10-41d6-89ff-676d86ba9543 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.149248] env[65758]: WARNING neutronclient.v2_0.client [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1071.157115] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Reconfiguring VM to detach interface {{(pid=65758) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1071.157351] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7612de3d-9e61-49f7-9dca-43b88a0fe0ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.178695] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1071.178695] env[65758]: value = "task-4661118" [ 1071.178695] env[65758]: _type = "Task" [ 1071.178695] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.192524] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.264114] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661113, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.361696] env[65758]: DEBUG nova.compute.utils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1071.363477] env[65758]: DEBUG nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1071.363829] env[65758]: DEBUG nova.network.neutron [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1071.364280] env[65758]: WARNING neutronclient.v2_0.client [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1071.364583] env[65758]: WARNING neutronclient.v2_0.client [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1071.365208] env[65758]: WARNING openstack [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1071.365559] env[65758]: WARNING openstack [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1071.375454] env[65758]: DEBUG nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1071.400094] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.400878] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f21184d-ff28-471a-b3c4-e3d4387e4da8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.410351] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 1071.410351] env[65758]: value = "task-4661119" [ 1071.410351] env[65758]: _type = "Task" [ 1071.410351] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.421837] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4661119, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.434089] env[65758]: DEBUG nova.policy [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd07b5ba2c3ef430293fbf39148961763', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bad3e3c7054c424a800cb12e9c5dbb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1071.592255] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.592543] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.692452] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.762369] env[65758]: DEBUG nova.network.neutron [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Successfully created port: 4b5db70a-0308-435f-8c89-1a51d34af34e {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1071.768857] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661113, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.686972} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.769153] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/df203c52-cb8e-4277-903c-c114ae8627be/df203c52-cb8e-4277-903c-c114ae8627be.vmdk to [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50/63b744d2-541a-42e3-9717-b06a4459fd50.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1071.769993] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4cea75-c8f7-44e6-9951-41cde51c808e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.796025] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50/63b744d2-541a-42e3-9717-b06a4459fd50.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.798575] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b2907f9-2167-4e47-9fd2-680e09c9e13c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.814749] env[65758]: DEBUG nova.compute.manager [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Stashing vm_state: stopped {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1071.824813] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1071.824813] env[65758]: value = "task-4661120" [ 1071.824813] env[65758]: _type = "Task" [ 1071.824813] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.835288] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661120, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.922318] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4661119, 'name': PowerOffVM_Task, 'duration_secs': 0.2262} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.925718] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.926539] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.927388] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dd77ef0-b672-4ec7-ab9f-6c2254c12e01 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.935332] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 1071.935332] env[65758]: value = "task-4661121" [ 1071.935332] env[65758]: _type = "Task" [ 1071.935332] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.949957] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1071.950189] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1071.950902] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909979', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'name': 'volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ce11868-fee2-40d3-9433-7bc398a1f756', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'serial': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1071.951556] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660d4bee-9c0b-47b1-89b6-b1d8175e5cfe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.972745] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1027bf-f090-4e37-8c35-06464816dce3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.980957] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b267df-9873-4071-ac65-e96e62e99df8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.005643] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ff02e9-ecae-4bb3-8c0b-4a01201eeb22 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.024989] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] The volume has not been displaced from its original location: [datastore1] volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f/volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f.vmdk. No consolidation needed. 
{{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1072.031037] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Reconfiguring VM instance instance-00000056 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1072.035270] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-671f1c83-e980-4ee7-af5a-29d37fe183e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.057626] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 1072.057626] env[65758]: value = "task-4661122" [ 1072.057626] env[65758]: _type = "Task" [ 1072.057626] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.065946] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4661122, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.096114] env[65758]: DEBUG nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1072.192347] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.206785] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9afb64c-2abb-46b9-bf7b-eb4996ffa92f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.214919] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719645d9-36af-416d-b1df-84cb23b3759c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.247915] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e233a3-73cc-4241-aff1-db8f0e6586e8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.259325] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff0b3b1-759d-437f-9e34-ca0e69d138fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.275104] env[65758]: DEBUG nova.compute.provider_tree [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.337235] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661120, 'name': ReconfigVM_Task, 'duration_secs': 0.496043} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.338370] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.338759] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50/63b744d2-541a-42e3-9717-b06a4459fd50.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.339521] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51324066-0d27-413a-a983-3e1fc3abc942 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.348310] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1072.348310] env[65758]: value = "task-4661123" [ 1072.348310] env[65758]: _type = "Task" [ 1072.348310] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.359180] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661123, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.391781] env[65758]: DEBUG nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1072.420628] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1072.420973] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1072.421048] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1072.421225] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1072.421367] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1072.421506] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1072.421710] env[65758]: DEBUG 
nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1072.421865] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1072.422108] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1072.422296] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1072.422467] env[65758]: DEBUG nova.virt.hardware [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1072.423368] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da108573-1159-4149-814f-fc313b3515b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.433028] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633d77ad-733b-4589-a449-a4118b03453b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.566177] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4661122, 'name': ReconfigVM_Task, 'duration_secs': 0.175934} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.566372] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Reconfigured VM instance instance-00000056 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1072.571402] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2437381c-f743-49d7-82d0-fec93a4748ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.587462] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 1072.587462] env[65758]: value = "task-4661124" [ 1072.587462] env[65758]: _type = "Task" [ 1072.587462] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.596487] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4661124, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.616591] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.691352] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.779122] env[65758]: DEBUG nova.scheduler.client.report [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1072.861821] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661123, 'name': Rename_Task, 'duration_secs': 0.157599} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.862233] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.862569] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a053d0ef-47b5-4625-bc72-ea589ad1ca11 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.871881] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1072.871881] env[65758]: value = "task-4661125" [ 1072.871881] env[65758]: _type = "Task" [ 1072.871881] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.882313] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661125, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.099445] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4661124, 'name': ReconfigVM_Task, 'duration_secs': 0.179311} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.099853] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-909979', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'name': 'volume-bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0ce11868-fee2-40d3-9433-7bc398a1f756', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f', 'serial': 'bf4db715-09f3-4c4a-8572-d4bc048f0a0f'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1073.100466] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.101485] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36c5994-cf7c-4242-823a-9ab537100056 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.110089] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1073.110527] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1a9c70e-87f5-485f-9289-97b18c52bc45 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.193706] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.195438] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1073.195691] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1073.195895] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Deleting the datastore file [datastore1] 0ce11868-fee2-40d3-9433-7bc398a1f756 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.196278] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b5fbbf7-5000-400d-abbc-353dfe0f2af2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.205590] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for the task: (returnval){ [ 1073.205590] env[65758]: value = "task-4661128" [ 1073.205590] env[65758]: _type = "Task" [ 1073.205590] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.216623] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4661128, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.284782] env[65758]: DEBUG nova.compute.manager [req-0de6f2dc-4899-49b5-98a4-f594e92b4362 req-d204702f-5bbf-46fc-89e6-59b66cac373d service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Received event network-vif-plugged-4b5db70a-0308-435f-8c89-1a51d34af34e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1073.285081] env[65758]: DEBUG oslo_concurrency.lockutils [req-0de6f2dc-4899-49b5-98a4-f594e92b4362 req-d204702f-5bbf-46fc-89e6-59b66cac373d service nova] Acquiring lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.285241] env[65758]: DEBUG oslo_concurrency.lockutils [req-0de6f2dc-4899-49b5-98a4-f594e92b4362 req-d204702f-5bbf-46fc-89e6-59b66cac373d service nova] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.285415] env[65758]: DEBUG oslo_concurrency.lockutils [req-0de6f2dc-4899-49b5-98a4-f594e92b4362 req-d204702f-5bbf-46fc-89e6-59b66cac373d service nova] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.285597] env[65758]: DEBUG nova.compute.manager [req-0de6f2dc-4899-49b5-98a4-f594e92b4362 req-d204702f-5bbf-46fc-89e6-59b66cac373d service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] No waiting events found dispatching network-vif-plugged-4b5db70a-0308-435f-8c89-1a51d34af34e {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1073.285746] env[65758]: WARNING nova.compute.manager [req-0de6f2dc-4899-49b5-98a4-f594e92b4362 req-d204702f-5bbf-46fc-89e6-59b66cac373d service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Received unexpected event network-vif-plugged-4b5db70a-0308-435f-8c89-1a51d34af34e for instance with vm_state building and task_state spawning. [ 1073.286892] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.287380] env[65758]: DEBUG nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1073.291526] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.953s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.385625] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661125, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.395150] env[65758]: DEBUG nova.network.neutron [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Successfully updated port: 4b5db70a-0308-435f-8c89-1a51d34af34e {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1073.691231] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.717050] env[65758]: DEBUG oslo_vmware.api [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Task: {'id': task-4661128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086933} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.717050] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.717050] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1073.717050] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1073.774604] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1073.774963] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84b9ff59-158a-4c9e-b22c-0a65322097c5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.787380] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5603250-5228-4191-a260-0bafe2ac14b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.800268] env[65758]: DEBUG nova.compute.utils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1073.803732] env[65758]: INFO nova.compute.claims [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.808156] env[65758]: DEBUG nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1073.808430] env[65758]: DEBUG nova.network.neutron [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1073.808970] env[65758]: WARNING neutronclient.v2_0.client [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1073.809638] env[65758]: WARNING neutronclient.v2_0.client [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
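Editor's note: the recurring pairs of "WARNING openstack ... Disabling service 'block-storage' ... / Disabling service 'key-manager' ..." messages (another pair follows immediately below) are emitted while openstacksdk builds its view of the cloud from nova.conf. For each service project it tries to read client options from the matching config group; the option valid_interfaces is not registered in the [cinder] or [barbican] group, so oslo.config raises NoSuchOptError and the SDK reacts by disabling that service for its own client instead of failing the request. A minimal, self-contained sketch of the oslo.config side of this follows; the group and option names mirror the log, but the snippet is illustrative only and is not Nova or SDK code.

# Minimal sketch (illustrative): reading an option that was never
# registered for a group makes oslo.config raise NoSuchOptError, the
# exception quoted verbatim inside the "Disabling service ..." warnings.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))   # the group itself exists
# 'valid_interfaces' is deliberately NOT registered for this group.

try:
    conf.cinder.valid_interfaces
except cfg.NoSuchOptError as exc:
    # Prints something like: no such option valid_interfaces in group [cinder]
    print(exc)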
[ 1073.810087] env[65758]: WARNING openstack [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1073.811064] env[65758]: WARNING openstack [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1073.845176] env[65758]: ERROR nova.compute.manager [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Failed to detach volume bf4db715-09f3-4c4a-8572-d4bc048f0a0f from /dev/sda: nova.exception.InstanceNotFound: Instance 0ce11868-fee2-40d3-9433-7bc398a1f756 could not be found. [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Traceback (most recent call last): [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 4224, in _do_rebuild_instance [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self.driver.rebuild(**kwargs) [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/driver.py", line 533, in rebuild [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] raise NotImplementedError() [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] NotImplementedError [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] During handling of the above exception, another exception occurred: [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Traceback (most recent call last): [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 3647, in _detach_root_volume [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self.driver.detach_volume(context, old_connection_info, [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] return self._volumeops.detach_volume(connection_info, instance) [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] 
File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self._detach_volume_vmdk(connection_info, instance) [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] stable_ref.fetch_moref(session) [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] nova.exception.InstanceNotFound: Instance 0ce11868-fee2-40d3-9433-7bc398a1f756 could not be found. [ 1073.845176] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] [ 1073.871809] env[65758]: DEBUG nova.policy [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b15f650508f844388197b63e6fee78a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4c2ab2b80c04c38bfb4c7cafac87fe6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1073.886500] env[65758]: DEBUG oslo_vmware.api [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661125, 'name': PowerOnVM_Task, 'duration_secs': 0.644408} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.886760] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.899322] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.899926] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.899926] env[65758]: DEBUG nova.network.neutron [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1073.997920] env[65758]: DEBUG nova.compute.manager [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1073.999012] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651839eb-6536-46f1-82aa-311e009115ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.018968] env[65758]: DEBUG nova.compute.utils [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Build of instance 0ce11868-fee2-40d3-9433-7bc398a1f756 aborted: Failed to rebuild volume backed instance. {{(pid=65758) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1074.021203] env[65758]: ERROR nova.compute.manager [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 0ce11868-fee2-40d3-9433-7bc398a1f756 aborted: Failed to rebuild volume backed instance. 
[ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Traceback (most recent call last): [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 4224, in _do_rebuild_instance [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self.driver.rebuild(**kwargs) [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/driver.py", line 533, in rebuild [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] raise NotImplementedError() [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] NotImplementedError [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] During handling of the above exception, another exception occurred: [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Traceback (most recent call last): [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 3682, in _rebuild_volume_backed_instance [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self._detach_root_volume(context, instance, root_bdm) [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 3661, in _detach_root_volume [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] with excutils.save_and_reraise_exception(): [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self.force_reraise() [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] raise self.value [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 3647, in _detach_root_volume [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self.driver.detach_volume(context, old_connection_info, [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] return self._volumeops.detach_volume(connection_info, instance) [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self._detach_volume_vmdk(connection_info, instance) [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] stable_ref.fetch_moref(session) [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] nova.exception.InstanceNotFound: Instance 0ce11868-fee2-40d3-9433-7bc398a1f756 could not be found. [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] During handling of the above exception, another exception occurred: [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Traceback (most recent call last): [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 11553, in _error_out_instance_on_exception [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] yield [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 3950, in rebuild_instance [ 1074.021203] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self._do_rebuild_instance_with_claim( [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 4036, in _do_rebuild_instance_with_claim [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self._do_rebuild_instance( [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 4228, in _do_rebuild_instance [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] self._rebuild_default_impl(**kwargs) [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 3805, in _rebuild_default_impl [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] 
self._rebuild_volume_backed_instance( [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] File "/opt/stack/nova/nova/compute/manager.py", line 3697, in _rebuild_volume_backed_instance [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] raise exception.BuildAbortException( [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] nova.exception.BuildAbortException: Build of instance 0ce11868-fee2-40d3-9433-7bc398a1f756 aborted: Failed to rebuild volume backed instance. [ 1074.022865] env[65758]: ERROR nova.compute.manager [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] [ 1074.192318] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.222316] env[65758]: DEBUG nova.network.neutron [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Successfully created port: 67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1074.309025] env[65758]: DEBUG nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1074.322128] env[65758]: INFO nova.compute.resource_tracker [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating resource usage from migration c2b17f22-6bec-4cfa-bbde-36c745a9c6b9 [ 1074.403095] env[65758]: WARNING openstack [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1074.403560] env[65758]: WARNING openstack [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1074.458877] env[65758]: DEBUG nova.network.neutron [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1074.522520] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e1ceb60-62d6-4e34-b42d-db7f2bb29ced tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.044s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.574426] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11768b5a-e9d8-4434-a8fa-060c20b98968 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.585847] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ad57d4-70fd-403f-9242-998a8bf7c77c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.621634] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070adc2c-2e69-4079-b9d3-d06ad2be3a5d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.631110] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce16b04-1ae3-46b3-a17c-ea044e4bb165 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.646309] env[65758]: DEBUG nova.compute.provider_tree [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.667050] env[65758]: WARNING neutronclient.v2_0.client [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1074.667050] env[65758]: WARNING openstack [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1074.667050] env[65758]: WARNING openstack [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1074.692425] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.150561] env[65758]: DEBUG nova.scheduler.client.report [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.193547] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.319538] env[65758]: DEBUG nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1075.335747] env[65758]: DEBUG nova.network.neutron [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Updating instance_info_cache with network_info: [{"id": "4b5db70a-0308-435f-8c89-1a51d34af34e", "address": "fa:16:3e:d4:39:8c", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5db70a-03", "ovs_interfaceid": "4b5db70a-0308-435f-8c89-1a51d34af34e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1075.352502] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1075.352775] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1075.352955] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1075.353235] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor 
pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1075.353386] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1075.353528] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1075.353733] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1075.353887] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1075.354098] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1075.354322] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1075.354507] env[65758]: DEBUG nova.virt.hardware [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1075.355483] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd395465-a41b-4ebd-b321-2df32a5eee4a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.367083] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec40d6d-b0cc-41f6-a7d4-ce81c5caa71b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.554842] env[65758]: DEBUG nova.compute.manager [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Received event network-changed-4b5db70a-0308-435f-8c89-1a51d34af34e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1075.555496] env[65758]: DEBUG nova.compute.manager [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 
req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Refreshing instance network info cache due to event network-changed-4b5db70a-0308-435f-8c89-1a51d34af34e. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1075.555852] env[65758]: DEBUG oslo_concurrency.lockutils [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] Acquiring lock "refresh_cache-6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.656102] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.365s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.656517] env[65758]: INFO nova.compute.manager [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Migrating [ 1075.667712] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.052s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.669563] env[65758]: INFO nova.compute.claims [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1075.699081] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.779940] env[65758]: DEBUG nova.compute.manager [req-4f7e2116-4ae5-4b08-99d8-126a1d130aa5 req-b7b981b4-b1d2-44ac-b4dc-6d1ee75eb3ed service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Received event network-vif-plugged-67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1075.780181] env[65758]: DEBUG oslo_concurrency.lockutils [req-4f7e2116-4ae5-4b08-99d8-126a1d130aa5 req-b7b981b4-b1d2-44ac-b4dc-6d1ee75eb3ed service nova] Acquiring lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.780677] env[65758]: DEBUG oslo_concurrency.lockutils [req-4f7e2116-4ae5-4b08-99d8-126a1d130aa5 req-b7b981b4-b1d2-44ac-b4dc-6d1ee75eb3ed service nova] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.780906] env[65758]: DEBUG oslo_concurrency.lockutils [req-4f7e2116-4ae5-4b08-99d8-126a1d130aa5 req-b7b981b4-b1d2-44ac-b4dc-6d1ee75eb3ed service nova] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.781113] env[65758]: DEBUG nova.compute.manager [req-4f7e2116-4ae5-4b08-99d8-126a1d130aa5 req-b7b981b4-b1d2-44ac-b4dc-6d1ee75eb3ed service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] No waiting events found dispatching network-vif-plugged-67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1075.781375] env[65758]: WARNING nova.compute.manager [req-4f7e2116-4ae5-4b08-99d8-126a1d130aa5 req-b7b981b4-b1d2-44ac-b4dc-6d1ee75eb3ed service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Received unexpected event network-vif-plugged-67e62b92-0851-4648-b7d7-181b274c8325 for instance with vm_state building and task_state spawning. 
[ 1075.785211] env[65758]: DEBUG nova.network.neutron [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Successfully updated port: 67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1075.838772] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.838965] env[65758]: DEBUG nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Instance network_info: |[{"id": "4b5db70a-0308-435f-8c89-1a51d34af34e", "address": "fa:16:3e:d4:39:8c", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5db70a-03", "ovs_interfaceid": "4b5db70a-0308-435f-8c89-1a51d34af34e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1075.839360] env[65758]: DEBUG oslo_concurrency.lockutils [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] Acquired lock "refresh_cache-6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.839457] env[65758]: DEBUG nova.network.neutron [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Refreshing network info cache for port 4b5db70a-0308-435f-8c89-1a51d34af34e {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1075.840751] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:39:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b5db70a-0308-435f-8c89-1a51d34af34e', 'vif_model': 
'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1075.848344] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1075.849548] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1075.849883] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3bd6a6b-35d9-4172-8e3e-0385445a88d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.883827] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1075.883827] env[65758]: value = "task-4661129" [ 1075.883827] env[65758]: _type = "Task" [ 1075.883827] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.899930] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661129, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.013856] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.014265] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.043298] env[65758]: DEBUG oslo_concurrency.lockutils [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.183415] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.183841] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.183901] env[65758]: DEBUG nova.network.neutron [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1076.196565] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.266705] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.287860] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.288213] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.288496] env[65758]: DEBUG nova.network.neutron [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1076.349821] env[65758]: WARNING neutronclient.v2_0.client [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1076.351021] env[65758]: WARNING openstack [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1076.351632] env[65758]: WARNING openstack [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1076.398082] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661129, 'name': CreateVM_Task, 'duration_secs': 0.461983} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.398546] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1076.399104] env[65758]: WARNING neutronclient.v2_0.client [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1076.399533] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.399686] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.400020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1076.400683] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f686dbf-3d33-4f24-aaac-53ac1f6a01a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.407453] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1076.407453] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52236340-fdde-071a-474a-9c97c7b0e4d6" [ 1076.407453] env[65758]: _type = "Task" [ 
1076.407453] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.423651] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52236340-fdde-071a-474a-9c97c7b0e4d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.517571] env[65758]: INFO nova.compute.manager [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Detaching volume e29ad381-7d88-46b8-b08e-180dc4b43679 [ 1076.549165] env[65758]: WARNING neutronclient.v2_0.client [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1076.549872] env[65758]: WARNING openstack [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1076.550268] env[65758]: WARNING openstack [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1076.561362] env[65758]: INFO nova.virt.block_device [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Attempting to driver detach volume e29ad381-7d88-46b8-b08e-180dc4b43679 from mountpoint /dev/sdb [ 1076.562283] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1076.562283] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910014', 'volume_id': 'e29ad381-7d88-46b8-b08e-180dc4b43679', 'name': 'volume-e29ad381-7d88-46b8-b08e-180dc4b43679', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49', 'attached_at': '', 'detached_at': '', 'volume_id': 'e29ad381-7d88-46b8-b08e-180dc4b43679', 'serial': 'e29ad381-7d88-46b8-b08e-180dc4b43679'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1076.562775] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d136bca-c081-4f25-a06e-54a2fd339b3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.590845] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb80db9-a907-4361-bf5d-0bf003b19fe4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.604626] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a7d55e-e02d-4625-a1b6-e675a2dc4141 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.627663] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7831e381-e8a2-4234-ad02-75c35ebfda3e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.647555] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] The volume has not been displaced from its original location: [datastore1] volume-e29ad381-7d88-46b8-b08e-180dc4b43679/volume-e29ad381-7d88-46b8-b08e-180dc4b43679.vmdk. No consolidation needed. {{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1076.653178] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfiguring VM instance instance-0000003b to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1076.655782] env[65758]: DEBUG nova.network.neutron [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Updated VIF entry in instance network info cache for port 4b5db70a-0308-435f-8c89-1a51d34af34e. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1076.656161] env[65758]: DEBUG nova.network.neutron [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Updating instance_info_cache with network_info: [{"id": "4b5db70a-0308-435f-8c89-1a51d34af34e", "address": "fa:16:3e:d4:39:8c", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5db70a-03", "ovs_interfaceid": "4b5db70a-0308-435f-8c89-1a51d34af34e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1076.657454] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ac8c2ef-d23e-444c-86d4-fd253a78207f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.672520] env[65758]: DEBUG oslo_concurrency.lockutils [req-b87cf6cd-4ff6-4829-ba45-137b6a731549 req-4bf9f74d-5d09-4eda-8026-46d57c776763 service nova] Releasing lock "refresh_cache-6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.682706] env[65758]: DEBUG oslo_vmware.api [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1076.682706] env[65758]: value = "task-4661130" [ 1076.682706] env[65758]: _type = "Task" [ 1076.682706] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.689809] env[65758]: WARNING neutronclient.v2_0.client [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1076.690942] env[65758]: WARNING openstack [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1076.691338] env[65758]: WARNING openstack [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1076.715877] env[65758]: DEBUG oslo_vmware.api [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661130, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.716120] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task} progress is 18%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.794358] env[65758]: WARNING openstack [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1076.794827] env[65758]: WARNING openstack [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1076.859506] env[65758]: DEBUG nova.network.neutron [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1076.905399] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64866499-2893-4961-b6f9-50ed56303210 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.918278] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b9173c99-6aae-4c3f-84e1-ac2eb1b8572b tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Suspending the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1076.919173] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-b4b48674-291f-4a30-96be-139bd7296491 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.925574] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52236340-fdde-071a-474a-9c97c7b0e4d6, 'name': SearchDatastore_Task, 'duration_secs': 0.015078} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.929459] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.929459] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1076.929716] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.929965] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.930247] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.938326] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-99776be3-e8a9-45b9-b152-83d1c8be840b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.940739] env[65758]: DEBUG oslo_vmware.api [None req-b9173c99-6aae-4c3f-84e1-ac2eb1b8572b tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1076.940739] env[65758]: value = "task-4661131" [ 1076.940739] env[65758]: _type = "Task" [ 1076.940739] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.952023] env[65758]: DEBUG oslo_vmware.api [None req-b9173c99-6aae-4c3f-84e1-ac2eb1b8572b tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661131, 'name': SuspendVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.957506] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.957506] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1076.958277] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ee2aa2-7a03-46db-9528-f088ed771eda {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.964861] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1076.964861] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f4b477-786d-a94e-5917-ac966095d687" [ 1076.964861] env[65758]: _type = "Task" [ 1076.964861] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.975239] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f4b477-786d-a94e-5917-ac966095d687, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.995829] env[65758]: WARNING neutronclient.v2_0.client [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1076.996588] env[65758]: WARNING openstack [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1076.996973] env[65758]: WARNING openstack [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1077.007824] env[65758]: WARNING neutronclient.v2_0.client [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1077.009632] env[65758]: WARNING openstack [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1077.009632] env[65758]: WARNING openstack [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1077.021765] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a1f4f4-56b7-4e14-a106-ec0a1e7ba3e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.030960] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e55ad00-e1e6-4817-8461-d242176b3bc8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.072474] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ecaa06-0ba5-4f34-9ffd-e3f18ef68f06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.081313] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89619915-a5a9-442a-9430-bbd48e7665db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.096373] env[65758]: DEBUG nova.compute.provider_tree [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in 
ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.114779] env[65758]: DEBUG nova.network.neutron [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [{"id": "2e41907c-1553-48df-9644-cb422d2f19df", "address": "fa:16:3e:b2:e3:b9", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e41907c-15", "ovs_interfaceid": "2e41907c-1553-48df-9644-cb422d2f19df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1077.146744] env[65758]: DEBUG nova.network.neutron [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance_info_cache with network_info: [{"id": "67e62b92-0851-4648-b7d7-181b274c8325", "address": "fa:16:3e:23:1f:98", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e62b92-08", "ovs_interfaceid": "67e62b92-0851-4648-b7d7-181b274c8325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1077.179307] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 
tempest-ServerActionsV293TestJSON-1832507352-project-member] Acquiring lock "0ce11868-fee2-40d3-9433-7bc398a1f756" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.179585] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "0ce11868-fee2-40d3-9433-7bc398a1f756" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.179858] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Acquiring lock "0ce11868-fee2-40d3-9433-7bc398a1f756-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.180130] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "0ce11868-fee2-40d3-9433-7bc398a1f756-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.180314] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "0ce11868-fee2-40d3-9433-7bc398a1f756-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.182701] env[65758]: INFO nova.compute.manager [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Terminating instance [ 1077.198115] env[65758]: DEBUG oslo_vmware.api [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661130, 'name': ReconfigVM_Task, 'duration_secs': 0.254265} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.198874] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Reconfigured VM instance instance-0000003b to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1077.206818] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bb9c6e6-8e0b-40d8-8dac-60d879e9dfd0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.216938] env[65758]: DEBUG oslo_vmware.api [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661118, 'name': ReconfigVM_Task, 'duration_secs': 5.882061} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.217696] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.217962] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Reconfigured VM to detach interface {{(pid=65758) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1077.218552] env[65758]: WARNING neutronclient.v2_0.client [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1077.218932] env[65758]: WARNING neutronclient.v2_0.client [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1077.219539] env[65758]: WARNING openstack [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1077.219892] env[65758]: WARNING openstack [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1077.234087] env[65758]: DEBUG oslo_vmware.api [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1077.234087] env[65758]: value = "task-4661132" [ 1077.234087] env[65758]: _type = "Task" [ 1077.234087] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.247112] env[65758]: DEBUG oslo_vmware.api [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661132, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.452415] env[65758]: DEBUG oslo_vmware.api [None req-b9173c99-6aae-4c3f-84e1-ac2eb1b8572b tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661131, 'name': SuspendVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.479435] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f4b477-786d-a94e-5917-ac966095d687, 'name': SearchDatastore_Task, 'duration_secs': 0.027305} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.479935] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f17f58ec-3f62-44ee-bc00-b46f2908954f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.489410] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1077.489410] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bf5909-da22-5e18-f7c3-3f71b39d8972" [ 1077.489410] env[65758]: _type = "Task" [ 1077.489410] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.499349] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bf5909-da22-5e18-f7c3-3f71b39d8972, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.590865] env[65758]: DEBUG nova.compute.manager [req-94e113e6-ebad-4557-92f2-7fc6fb98ecb9 req-4a2ebb77-0356-42ee-94c3-da0b40e8d4be service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-vif-deleted-608946a3-79b3-484c-b023-da1a84676162 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1077.591096] env[65758]: INFO nova.compute.manager [req-94e113e6-ebad-4557-92f2-7fc6fb98ecb9 req-4a2ebb77-0356-42ee-94c3-da0b40e8d4be service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Neutron deleted interface 608946a3-79b3-484c-b023-da1a84676162; detaching it from the instance and deleting it from the info cache [ 1077.591417] env[65758]: DEBUG nova.network.neutron [req-94e113e6-ebad-4557-92f2-7fc6fb98ecb9 req-4a2ebb77-0356-42ee-94c3-da0b40e8d4be service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f6931aa0-2403-4052-97bb-c06158af9887", "address": "fa:16:3e:5a:e4:1c", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6931aa0-24", "ovs_interfaceid": "f6931aa0-2403-4052-97bb-c06158af9887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1077.604185] env[65758]: DEBUG nova.scheduler.client.report [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1077.618580] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.649344] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.649731] env[65758]: DEBUG nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Instance network_info: |[{"id": "67e62b92-0851-4648-b7d7-181b274c8325", "address": "fa:16:3e:23:1f:98", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e62b92-08", "ovs_interfaceid": "67e62b92-0851-4648-b7d7-181b274c8325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1077.650237] 
env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:1f:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5efce30e-48dd-493a-a354-f562a8adf7af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67e62b92-0851-4648-b7d7-181b274c8325', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.661145] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1077.662112] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1077.662529] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97c7ca2b-3799-4d90-9dc2-d8148452df69 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.686968] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.686968] env[65758]: value = "task-4661133" [ 1077.686968] env[65758]: _type = "Task" [ 1077.686968] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.690878] env[65758]: DEBUG nova.compute.manager [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1077.691268] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc2f1352-e213-443e-8c63-c1319918a11d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.700254] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661133, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.706523] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de9fb86-b884-47b2-b649-cd2c8044c942 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.726502] env[65758]: DEBUG oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5204aa4b-c2f0-f0b1-0805-7e567a0b04aa/disk-0.vmdk. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1077.728052] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9174e1aa-7c9e-4f57-b311-f5795a883e9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.736301] env[65758]: DEBUG oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5204aa4b-c2f0-f0b1-0805-7e567a0b04aa/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1077.736477] env[65758]: ERROR oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5204aa4b-c2f0-f0b1-0805-7e567a0b04aa/disk-0.vmdk due to incomplete transfer. [ 1077.739975] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7605c82d-93c7-4408-9f9e-925278f28084 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.756718] env[65758]: WARNING nova.virt.vmwareapi.driver [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 0ce11868-fee2-40d3-9433-7bc398a1f756 could not be found. [ 1077.756956] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.758098] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f501edd7-83b6-4a1e-b67a-2303ba051c4f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.763169] env[65758]: DEBUG oslo_vmware.api [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661132, 'name': ReconfigVM_Task, 'duration_secs': 0.159681} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.763882] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910014', 'volume_id': 'e29ad381-7d88-46b8-b08e-180dc4b43679', 'name': 'volume-e29ad381-7d88-46b8-b08e-180dc4b43679', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49', 'attached_at': '', 'detached_at': '', 'volume_id': 'e29ad381-7d88-46b8-b08e-180dc4b43679', 'serial': 'e29ad381-7d88-46b8-b08e-180dc4b43679'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1077.767411] env[65758]: DEBUG oslo_vmware.rw_handles [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5204aa4b-c2f0-f0b1-0805-7e567a0b04aa/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1077.767626] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Uploaded image 4716357a-6eaf-4452-a4eb-55c3224666a3 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1077.769925] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1077.771264] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bedb0ad5-ff8d-4798-8cb1-35312f35bd1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.775809] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61266de-5ea7-4357-bb9a-403924128e56 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.798395] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1077.798395] env[65758]: value = "task-4661134" [ 1077.798395] env[65758]: _type = "Task" [ 1077.798395] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.810521] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661134, 'name': Destroy_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.825724] env[65758]: WARNING nova.virt.vmwareapi.vmops [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0ce11868-fee2-40d3-9433-7bc398a1f756 could not be found. [ 1077.825906] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.826157] env[65758]: INFO nova.compute.manager [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Took 0.14 seconds to destroy the instance on the hypervisor. [ 1077.826446] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1077.828143] env[65758]: DEBUG nova.compute.manager [-] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1077.828233] env[65758]: DEBUG nova.network.neutron [-] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1077.828510] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1077.829121] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1077.829400] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1077.839362] env[65758]: DEBUG nova.compute.manager [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Received event network-changed-67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1077.839362] env[65758]: DEBUG nova.compute.manager [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Refreshing instance network info cache due to event network-changed-67e62b92-0851-4648-b7d7-181b274c8325. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1077.839362] env[65758]: DEBUG oslo_concurrency.lockutils [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] Acquiring lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.839362] env[65758]: DEBUG oslo_concurrency.lockutils [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] Acquired lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.839362] env[65758]: DEBUG nova.network.neutron [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Refreshing network info cache for port 67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1077.932647] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1077.953739] env[65758]: DEBUG oslo_vmware.api [None req-b9173c99-6aae-4c3f-84e1-ac2eb1b8572b tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661131, 'name': SuspendVM_Task} progress is 62%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.001722] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bf5909-da22-5e18-f7c3-3f71b39d8972, 'name': SearchDatastore_Task, 'duration_secs': 0.012731} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.002052] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.002374] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc/6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1078.002798] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-207f138a-3ff2-44ac-aefc-4f164f366c83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.013684] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1078.013684] env[65758]: value = "task-4661135" [ 1078.013684] env[65758]: _type = "Task" [ 1078.013684] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.025098] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661135, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.094204] env[65758]: DEBUG oslo_concurrency.lockutils [req-94e113e6-ebad-4557-92f2-7fc6fb98ecb9 req-4a2ebb77-0356-42ee-94c3-da0b40e8d4be service nova] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.094491] env[65758]: DEBUG oslo_concurrency.lockutils [req-94e113e6-ebad-4557-92f2-7fc6fb98ecb9 req-4a2ebb77-0356-42ee-94c3-da0b40e8d4be service nova] Acquired lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.095508] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134169af-c5d0-4383-8336-f4385fddc548 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.118322] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.118893] env[65758]: DEBUG nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1078.121977] env[65758]: DEBUG oslo_concurrency.lockutils [req-94e113e6-ebad-4557-92f2-7fc6fb98ecb9 req-4a2ebb77-0356-42ee-94c3-da0b40e8d4be service nova] Releasing lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.122309] env[65758]: WARNING nova.compute.manager [req-94e113e6-ebad-4557-92f2-7fc6fb98ecb9 req-4a2ebb77-0356-42ee-94c3-da0b40e8d4be service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Detach interface failed, port_id=608946a3-79b3-484c-b023-da1a84676162, reason: No device with interface-id 608946a3-79b3-484c-b023-da1a84676162 exists on VM: nova.exception.NotFound: No device with interface-id 608946a3-79b3-484c-b023-da1a84676162 exists on VM [ 1078.122785] env[65758]: DEBUG oslo_concurrency.lockutils [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.080s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.198553] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661133, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.316784] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661134, 'name': Destroy_Task} progress is 33%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.335750] env[65758]: DEBUG nova.objects.instance [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lazy-loading 'flavor' on Instance uuid 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.343782] env[65758]: WARNING neutronclient.v2_0.client [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1078.343782] env[65758]: WARNING openstack [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1078.346353] env[65758]: WARNING openstack [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1078.403835] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5fe58f-b546-46f2-a71d-4d4e75140d4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.412783] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203556c6-684d-44ae-8958-4f307b768870 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.458556] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc043861-814e-489b-9f00-a9162ca81efa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.469796] env[65758]: DEBUG oslo_vmware.api [None req-b9173c99-6aae-4c3f-84e1-ac2eb1b8572b tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661131, 'name': SuspendVM_Task, 'duration_secs': 1.372463} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.472226] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b9173c99-6aae-4c3f-84e1-ac2eb1b8572b tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Suspended the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1078.472226] env[65758]: DEBUG nova.compute.manager [None req-b9173c99-6aae-4c3f-84e1-ac2eb1b8572b tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1078.473154] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e862edc0-d8ca-4d2f-8b8a-53dd56884a1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.477099] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df9a8f5-1a15-4291-897e-789d327acba8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.501751] env[65758]: DEBUG nova.compute.provider_tree [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.510794] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.511600] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.511600] env[65758]: DEBUG nova.network.neutron [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1078.527646] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661135, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.632857] env[65758]: DEBUG nova.compute.utils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1078.638653] env[65758]: DEBUG nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1078.639077] env[65758]: DEBUG nova.network.neutron [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1078.639297] env[65758]: WARNING neutronclient.v2_0.client [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1078.639620] env[65758]: WARNING neutronclient.v2_0.client [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1078.640350] env[65758]: WARNING openstack [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1078.640656] env[65758]: WARNING openstack [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1078.698888] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661133, 'name': CreateVM_Task, 'duration_secs': 0.585926} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.699094] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1078.699627] env[65758]: WARNING neutronclient.v2_0.client [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1078.699993] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.700177] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.700650] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1078.700808] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9df9b31-768f-4fe1-bd07-261ca6b16bb0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.704599] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.704599] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.710677] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1078.710677] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f79d4e-cf64-6955-13d0-878887c3ce61" [ 1078.710677] env[65758]: _type = "Task" [ 1078.710677] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.723793] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f79d4e-cf64-6955-13d0-878887c3ce61, 'name': SearchDatastore_Task, 'duration_secs': 0.010134} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.724248] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.724535] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.724840] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.725017] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.725234] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.725557] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-322d980b-1765-43b0-a8a7-b1533dbe4bbf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.735930] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.736156] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1078.737489] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ed76ec9-9c79-4b46-9ced-c6cb286a82d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.746488] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1078.746488] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5289f39e-97ec-ecef-d784-08d97f1ab6f5" [ 1078.746488] env[65758]: _type = "Task" [ 1078.746488] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.756237] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5289f39e-97ec-ecef-d784-08d97f1ab6f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.775964] env[65758]: DEBUG nova.policy [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3338c19613c041abb681fa6cc661652a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e114eef3998848699a9a086fee86db29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1078.814455] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661134, 'name': Destroy_Task, 'duration_secs': 0.587506} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.814455] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Destroyed the VM [ 1078.814455] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1078.814455] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-10449734-f976-4b4e-9f7e-89feb18b366d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.822641] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1078.822641] env[65758]: value = "task-4661136" [ 1078.822641] env[65758]: _type = "Task" [ 1078.822641] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.841961] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661136, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.007484] env[65758]: DEBUG nova.network.neutron [-] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1079.009653] env[65758]: DEBUG nova.scheduler.client.report [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1079.014871] env[65758]: WARNING neutronclient.v2_0.client [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1079.015764] env[65758]: WARNING openstack [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1079.016143] env[65758]: WARNING openstack [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1079.038667] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663536} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.049463] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc/6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1079.049463] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1079.049463] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7530459-7a0c-4f5b-9431-14975ddd6c6b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.049463] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1079.049463] env[65758]: value = "task-4661137" [ 1079.049463] env[65758]: _type = "Task" [ 1079.049463] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.059780] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661137, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.138211] env[65758]: DEBUG nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1079.143174] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9248a1-a673-47bc-ba5d-bbd8099ad259 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.168104] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance '76ec31e6-65c2-4290-9ec0-b274be95baa4' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1079.216981] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.217808] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.218198] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.218198] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.218351] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.218493] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.218620] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1079.218755] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.257577] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5289f39e-97ec-ecef-d784-08d97f1ab6f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010135} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.258669] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-291f3083-ab8b-465d-a009-ec994be56529 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.265727] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1079.265727] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52650bf6-db72-44a5-2c3c-f63e15cb1684" [ 1079.265727] env[65758]: _type = "Task" [ 1079.265727] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.275859] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52650bf6-db72-44a5-2c3c-f63e15cb1684, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.342271] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661136, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.344855] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7fba6a84-e9f4-4e11-a29d-c3ab146150f9 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.331s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.353655] env[65758]: DEBUG nova.network.neutron [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Successfully created port: 05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1079.514673] env[65758]: INFO nova.compute.manager [-] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Took 1.69 seconds to deallocate network for instance. 
[ 1079.527155] env[65758]: DEBUG oslo_concurrency.lockutils [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.404s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.527393] env[65758]: INFO nova.compute.manager [None req-887e8b4e-e562-44fa-be73-2999fa3e94ac tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Successfully reverted task state from rebuilding on failure for instance. [ 1079.535758] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.269s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.536131] env[65758]: DEBUG nova.objects.instance [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'pci_requests' on Instance uuid afc1eb16-c275-4b3b-a7fe-9938d2241e24 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.562595] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081821} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.563059] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1079.564207] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7db149b-726f-421b-8b0b-ad06e948eae4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.590997] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc/6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1079.592518] env[65758]: WARNING neutronclient.v2_0.client [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1079.593242] env[65758]: WARNING openstack [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1079.593625] env[65758]: WARNING openstack [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1079.602037] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c60656ea-d54a-46b7-b60c-bcf7cf64cf3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.621303] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.621567] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.621863] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.622168] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.622499] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.625084] env[65758]: INFO nova.compute.manager [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd 
tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Terminating instance [ 1079.633502] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1079.633502] env[65758]: value = "task-4661138" [ 1079.633502] env[65758]: _type = "Task" [ 1079.633502] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.646283] env[65758]: INFO nova.virt.block_device [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Booting with volume eef78269-5bc6-4fe3-9fa1-c9e27001a9e1 at /dev/sda [ 1079.647997] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661138, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.687484] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.687484] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7306dc4c-d6b9-41bb-adba-97d40f6f27f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.689947] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1079.689947] env[65758]: value = "task-4661139" [ 1079.689947] env[65758]: _type = "Task" [ 1079.689947] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.711064] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1079.712022] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance '76ec31e6-65c2-4290-9ec0-b274be95baa4' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1079.721780] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.733784] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd8740e5-2e2d-4a90-b95b-b668e8da2c23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.746351] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3e087b-b0d8-478f-b9fe-243c4a11e9d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.776780] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52650bf6-db72-44a5-2c3c-f63e15cb1684, 'name': SearchDatastore_Task, 'duration_secs': 0.011874} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.792369] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.792711] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81/47cebd84-f9a1-4997-96aa-c76c5faa8c81.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1079.793789] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16b34e37-a6d5-4246-a1f9-5499296d6f41 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.796164] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82053676-526b-470e-a421-8ebf32f26353 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.808537] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be71bbf7-56a3-4466-bbd4-10b4ded35681 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.820025] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1079.820025] env[65758]: value = "task-4661140" [ 1079.820025] env[65758]: _type = "Task" [ 1079.820025] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.856551] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74937c6e-e185-4c43-b693-d6bf2b0f3bbb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.860387] env[65758]: DEBUG oslo_vmware.api [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661136, 'name': RemoveSnapshot_Task, 'duration_secs': 0.655713} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.860795] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1079.861088] env[65758]: INFO nova.compute.manager [None req-81ec2c89-c801-4312-be72-368339dd2cd9 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Took 15.40 seconds to snapshot the instance on the hypervisor. [ 1079.870132] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319d0272-0994-4bc8-9e13-94220d3df6fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.888125] env[65758]: DEBUG nova.virt.block_device [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating existing volume attachment record: 66e06035-1f24-44a0-a832-83737abdde25 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1080.045165] env[65758]: DEBUG nova.objects.instance [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'numa_topology' on Instance uuid afc1eb16-c275-4b3b-a7fe-9938d2241e24 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.085945] env[65758]: INFO nova.compute.manager [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Took 0.57 seconds to detach 1 volumes for instance. [ 1080.087060] env[65758]: DEBUG nova.compute.manager [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Deleting volume: bf4db715-09f3-4c4a-8572-d4bc048f0a0f {{(pid=65758) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3320}} [ 1080.130075] env[65758]: DEBUG nova.compute.manager [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1080.130357] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1080.131272] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c179d8-a2eb-4888-90ce-c63e45e00017 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.143606] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1080.146837] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a52522e0-dc8c-4153-ab6f-16ada73d917d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.148560] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661138, 'name': ReconfigVM_Task, 'duration_secs': 0.317613} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.148838] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc/6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.149899] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1be6c647-1809-4d9e-9be8-e9aa2e8db2b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.154984] env[65758]: DEBUG oslo_vmware.api [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1080.154984] env[65758]: value = "task-4661141" [ 1080.154984] env[65758]: _type = "Task" [ 1080.154984] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.159030] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1080.159030] env[65758]: value = "task-4661142" [ 1080.159030] env[65758]: _type = "Task" [ 1080.159030] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.166639] env[65758]: DEBUG oslo_vmware.api [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661141, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.170812] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661142, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.218335] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1080.218808] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1080.220447] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1080.220802] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1080.221275] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1080.221514] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1080.221793] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 
tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1080.222019] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1080.222269] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1080.222507] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1080.222738] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1080.234312] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2591173-da0e-4bb9-9ed7-5280349ddbdb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.255962] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1080.255962] env[65758]: value = "task-4661144" [ 1080.255962] env[65758]: _type = "Task" [ 1080.255962] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.267625] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661144, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.333308] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661140, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.548946] env[65758]: INFO nova.compute.claims [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1080.569511] env[65758]: DEBUG nova.network.neutron [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updated VIF entry in instance network info cache for port 67e62b92-0851-4648-b7d7-181b274c8325. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1080.569931] env[65758]: DEBUG nova.network.neutron [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance_info_cache with network_info: [{"id": "67e62b92-0851-4648-b7d7-181b274c8325", "address": "fa:16:3e:23:1f:98", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e62b92-08", "ovs_interfaceid": "67e62b92-0851-4648-b7d7-181b274c8325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1080.630326] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.673344] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661142, 'name': Rename_Task, 'duration_secs': 0.17684} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.677286] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1080.677286] env[65758]: DEBUG oslo_vmware.api [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661141, 'name': PowerOffVM_Task, 'duration_secs': 0.412227} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.677286] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85b22d38-e714-4089-b685-e6e13982d387 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.678940] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1080.679134] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1080.679377] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc9cc152-c952-49aa-8da1-b98606887468 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.686988] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1080.686988] env[65758]: value = "task-4661145" [ 1080.686988] env[65758]: _type = "Task" [ 1080.686988] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.700531] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661145, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.768878] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661144, 'name': ReconfigVM_Task, 'duration_secs': 0.409538} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.769239] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance '76ec31e6-65c2-4290-9ec0-b274be95baa4' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1080.795389] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1080.795672] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1080.795860] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleting the datastore file [datastore1] 5fc4f1b8-9024-4155-b56d-56a8d08f0259 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1080.796191] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74f31ccc-d78e-465b-b6ee-cab6c22c4207 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.803444] env[65758]: DEBUG oslo_vmware.api [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1080.803444] env[65758]: value = "task-4661147" [ 1080.803444] env[65758]: _type = "Task" [ 1080.803444] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.813496] env[65758]: DEBUG oslo_vmware.api [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.834506] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661140, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.847809} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.835443] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81/47cebd84-f9a1-4997-96aa-c76c5faa8c81.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1080.835443] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1080.835443] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7eec2d07-c82a-46c2-8e37-d84f6de89952 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.843966] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1080.843966] env[65758]: value = "task-4661148" [ 1080.843966] env[65758]: _type = "Task" [ 1080.843966] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.854974] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661148, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.024290] env[65758]: WARNING neutronclient.v2_0.client [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1081.024290] env[65758]: WARNING openstack [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1081.024290] env[65758]: WARNING openstack [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1081.072983] env[65758]: DEBUG oslo_concurrency.lockutils [req-3382d551-9383-49de-8e3d-1ad387cf4489 req-d76f7683-f862-4659-b676-4e2f7a3d2c19 service nova] Releasing lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.171172] env[65758]: DEBUG nova.network.neutron [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Successfully updated port: 05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1081.201346] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661145, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.248951] env[65758]: INFO nova.network.neutron [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Port f6931aa0-2403-4052-97bb-c06158af9887 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1081.251365] env[65758]: DEBUG nova.network.neutron [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [{"id": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "address": "fa:16:3e:51:62:8a", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91b61d1-ee", "ovs_interfaceid": "e91b61d1-ee47-49e8-a302-26b7b0725dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1081.277876] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1081.278767] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.278767] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1081.278767] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.278958] env[65758]: DEBUG nova.virt.hardware 
[None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1081.278958] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1081.279443] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1081.279794] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1081.279907] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1081.280207] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1081.280553] env[65758]: DEBUG nova.virt.hardware [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1081.288615] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Reconfiguring VM instance instance-00000051 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1081.289743] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-928d567d-5e73-4ac0-a4d6-3e7474d4164b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.306987] env[65758]: DEBUG nova.compute.manager [req-f8a281b6-0913-462d-b427-de32dacd9deb req-9798a742-d8e9-4443-b98d-34579c220738 service nova] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Received event network-vif-deleted-f7953062-77ac-411d-9809-b817fca06bbb {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1081.307240] env[65758]: DEBUG nova.compute.manager [req-f8a281b6-0913-462d-b427-de32dacd9deb 
req-9798a742-d8e9-4443-b98d-34579c220738 service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-vif-deleted-f6931aa0-2403-4052-97bb-c06158af9887 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1081.319355] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1081.319355] env[65758]: value = "task-4661149" [ 1081.319355] env[65758]: _type = "Task" [ 1081.319355] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.323132] env[65758]: DEBUG oslo_vmware.api [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206631} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.326470] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1081.328317] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1081.328317] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1081.328317] env[65758]: INFO nova.compute.manager [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1081.328317] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1081.328317] env[65758]: DEBUG nova.compute.manager [-] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1081.328317] env[65758]: DEBUG nova.network.neutron [-] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1081.328317] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1081.328665] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1081.328932] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1081.346078] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661149, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.355847] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661148, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078437} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.356194] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1081.357123] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df35baa-0ee8-4000-8c0a-d4c3c4e562b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.388023] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81/47cebd84-f9a1-4997-96aa-c76c5faa8c81.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.388023] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1081.390035] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d5edc3f-826f-4367-9a2c-979818c85cc4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.413284] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1081.413284] env[65758]: value = "task-4661150" [ 1081.413284] env[65758]: _type = "Task" [ 1081.413284] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.424841] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661150, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.454026] env[65758]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port f6931aa0-2403-4052-97bb-c06158af9887 could not be found.", "detail": ""}} {{(pid=65758) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:265}} [ 1081.454398] env[65758]: DEBUG nova.network.neutron [-] Unable to show port f6931aa0-2403-4052-97bb-c06158af9887 as it no longer exists. 
{{(pid=65758) _unbind_ports /opt/stack/nova/nova/network/neutron.py:700}} [ 1081.676179] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.676179] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.676334] env[65758]: DEBUG nova.network.neutron [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1081.705465] env[65758]: DEBUG oslo_vmware.api [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661145, 'name': PowerOnVM_Task, 'duration_secs': 0.58988} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.705465] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1081.705465] env[65758]: INFO nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Took 9.31 seconds to spawn the instance on the hypervisor. 
[ 1081.705465] env[65758]: DEBUG nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1081.706360] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ba6673-71d2-4cea-b953-1ea0a6e59a09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.754846] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-5fc4f1b8-9024-4155-b56d-56a8d08f0259" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.851936] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661149, 'name': ReconfigVM_Task, 'duration_secs': 0.461165} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.852496] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Reconfigured VM instance instance-00000051 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1081.854378] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64189f95-e0ab-4d73-b1c5-ee77ad1474ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.882820] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 76ec31e6-65c2-4290-9ec0-b274be95baa4/76ec31e6-65c2-4290-9ec0-b274be95baa4.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.885029] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a6fe2bd-41f9-4608-87fb-d11a52432a94 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.899249] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad91d075-83ec-4b96-9781-50d184e613b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.902807] env[65758]: INFO nova.compute.manager [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Resuming [ 1081.903290] env[65758]: DEBUG nova.objects.instance [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 
tempest-ServersNegativeTestJSON-165210990-project-member] Lazy-loading 'flavor' on Instance uuid 63b744d2-541a-42e3-9717-b06a4459fd50 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.912030] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ce965c-24d2-452f-94c4-b541b448052a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.917814] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1081.917814] env[65758]: value = "task-4661151" [ 1081.917814] env[65758]: _type = "Task" [ 1081.917814] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.957261] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f11cc67-d733-4002-85a5-7038547ccbb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.960793] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.960793] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661151, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.967478] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7accc4-a13e-41c4-87b7-596ac4d24614 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.985634] env[65758]: DEBUG nova.compute.provider_tree [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.997083] env[65758]: DEBUG nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1081.997706] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1081.997910] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.998106] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1081.998342] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.998469] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1081.998591] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1081.998793] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1081.998947] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1081.999117] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies 
{{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1081.999295] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1081.999495] env[65758]: DEBUG nova.virt.hardware [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1082.001058] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59247d0-9445-425d-a529-25dc310bc343 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.013972] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07633752-596e-457d-a0d3-fdb4cd6adef6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.181561] env[65758]: WARNING openstack [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1082.182021] env[65758]: WARNING openstack [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1082.222167] env[65758]: DEBUG nova.network.neutron [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1082.234026] env[65758]: INFO nova.compute.manager [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Took 16.03 seconds to build instance. 
[ 1082.241390] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.241632] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.241926] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.242043] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.242187] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.245502] env[65758]: INFO nova.compute.manager [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Terminating instance [ 1082.258937] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8b0693ab-f7d9-4a5c-83cd-d32d3b5b380e tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-5fc4f1b8-9024-4155-b56d-56a8d08f0259-608946a3-79b3-484c-b023-da1a84676162" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.668s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.336206] env[65758]: WARNING neutronclient.v2_0.client [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1082.336936] env[65758]: WARNING openstack [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1082.337302] env[65758]: WARNING openstack [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1082.436135] env[65758]: DEBUG oslo_vmware.api [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661151, 'name': ReconfigVM_Task, 'duration_secs': 0.30594} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.442388] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 76ec31e6-65c2-4290-9ec0-b274be95baa4/76ec31e6-65c2-4290-9ec0-b274be95baa4.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1082.442943] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance '76ec31e6-65c2-4290-9ec0-b274be95baa4' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1082.448952] env[65758]: DEBUG nova.compute.manager [req-83d2cec0-d447-4ec9-a40e-4195efe2a999 req-32355284-3b7f-4ef5-b30f-a9c27184e74e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Received event network-vif-deleted-e91b61d1-ee47-49e8-a302-26b7b0725dff {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1082.449446] env[65758]: INFO nova.compute.manager [req-83d2cec0-d447-4ec9-a40e-4195efe2a999 req-32355284-3b7f-4ef5-b30f-a9c27184e74e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Neutron deleted interface e91b61d1-ee47-49e8-a302-26b7b0725dff; detaching it from the instance and deleting it from the info cache [ 1082.449741] env[65758]: DEBUG nova.network.neutron [req-83d2cec0-d447-4ec9-a40e-4195efe2a999 req-32355284-3b7f-4ef5-b30f-a9c27184e74e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1082.451159] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661150, 'name': 
ReconfigVM_Task, 'duration_secs': 0.675657} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.456034] env[65758]: DEBUG nova.network.neutron [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance_info_cache with network_info: [{"id": "05e0fa46-1b67-477a-bc40-26c9641f6549", "address": "fa:16:3e:6d:7e:f5", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05e0fa46-1b", "ovs_interfaceid": "05e0fa46-1b67-477a-bc40-26c9641f6549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1082.456034] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81/47cebd84-f9a1-4997-96aa-c76c5faa8c81.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1082.456998] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3517598-7662-4445-9508-dd9e411d6ee1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.468233] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1082.468233] env[65758]: value = "task-4661152" [ 1082.468233] env[65758]: _type = "Task" [ 1082.468233] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.478558] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661152, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.492279] env[65758]: DEBUG nova.scheduler.client.report [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.584564] env[65758]: DEBUG nova.network.neutron [-] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1082.736145] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3d34258b-c3ca-4adc-9c92-16dbe9337f40 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.550s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.750916] env[65758]: DEBUG nova.compute.manager [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1082.751190] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1082.752209] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36898fb-e4b4-42e4-a993-374356d2247c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.761352] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1082.761646] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55788b72-15ef-42ba-b4c9-ea885bf6d529 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.770282] env[65758]: DEBUG oslo_vmware.api [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1082.770282] env[65758]: value = "task-4661153" [ 1082.770282] env[65758]: _type = "Task" [ 1082.770282] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.779553] env[65758]: DEBUG oslo_vmware.api [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661153, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.956223] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2497eb25-ad95-4f3a-ac86-009613798610 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.960213] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.960671] env[65758]: DEBUG nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Instance network_info: |[{"id": "05e0fa46-1b67-477a-bc40-26c9641f6549", "address": "fa:16:3e:6d:7e:f5", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05e0fa46-1b", "ovs_interfaceid": "05e0fa46-1b67-477a-bc40-26c9641f6549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1082.962018] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fe14728-9d88-4455-a2c5-b727f04197d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.963856] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:7e:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05e0fa46-1b67-477a-bc40-26c9641f6549', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1082.972369] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1082.973306] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1082.977129] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c68876d-0bd3-4662-9e34-625dafeae897 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.011454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.476s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.012899] env[65758]: WARNING neutronclient.v2_0.client [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1083.017254] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.295s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.017436] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.017617] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1083.018261] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.388s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.018623] env[65758]: DEBUG nova.objects.instance [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lazy-loading 'resources' on Instance uuid 0ce11868-fee2-40d3-9433-7bc398a1f756 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.024628] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641ec63d-6829-4da3-9730-ba41c1e02943 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.037671] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a05f077-5e71-4a50-9182-c333c97fd9dc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.048348] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfb459f-ceb1-4107-b9c5-2b71b3fe64f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.053374] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.053374] env[65758]: value = "task-4661154" [ 1083.053374] env[65758]: _type = "Task" [ 1083.053374] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.053701] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661152, 'name': Rename_Task, 'duration_secs': 0.342755} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.055154] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1083.061081] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a1beb99-00c2-4d55-a216-7d51aebf1aef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.084068] env[65758]: INFO nova.network.neutron [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating port 2adc4687-14f6-4742-8afd-a86473befd61 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1083.084351] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance '76ec31e6-65c2-4290-9ec0-b274be95baa4' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1083.105843] env[65758]: INFO nova.compute.manager [-] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Took 1.78 seconds to deallocate network for instance. [ 1083.107545] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315e26b7-bd90-40fe-b193-d30864547afe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.118119] env[65758]: DEBUG nova.compute.manager [req-83d2cec0-d447-4ec9-a40e-4195efe2a999 req-32355284-3b7f-4ef5-b30f-a9c27184e74e service nova] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Detach interface failed, port_id=e91b61d1-ee47-49e8-a302-26b7b0725dff, reason: Instance 5fc4f1b8-9024-4155-b56d-56a8d08f0259 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1083.129208] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1083.129208] env[65758]: value = "task-4661155" [ 1083.129208] env[65758]: _type = "Task" [ 1083.129208] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.129208] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661154, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.149551] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4077fb38-9761-4dd7-bcc5-b6c03f71dbe1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.156938] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661155, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.165033] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b741b83a-4795-4d58-93fc-29a90e7e82f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.207502] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178854MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1083.207643] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.282439] env[65758]: DEBUG oslo_vmware.api [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661153, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.419716] env[65758]: DEBUG oslo_concurrency.lockutils [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.420065] env[65758]: DEBUG oslo_concurrency.lockutils [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquired lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.420286] env[65758]: DEBUG nova.network.neutron [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1083.423822] env[65758]: DEBUG nova.compute.manager [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Received event network-vif-plugged-05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1083.425459] env[65758]: DEBUG oslo_concurrency.lockutils [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Acquiring lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.425698] env[65758]: DEBUG oslo_concurrency.lockutils [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.425728] env[65758]: DEBUG oslo_concurrency.lockutils [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.426554] env[65758]: DEBUG nova.compute.manager [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] No waiting events found dispatching network-vif-plugged-05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1083.426554] env[65758]: WARNING nova.compute.manager [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Received unexpected event network-vif-plugged-05e0fa46-1b67-477a-bc40-26c9641f6549 for instance with vm_state building and task_state spawning. 
[ 1083.426554] env[65758]: DEBUG nova.compute.manager [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Received event network-changed-05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1083.426554] env[65758]: DEBUG nova.compute.manager [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Refreshing instance network info cache due to event network-changed-05e0fa46-1b67-477a-bc40-26c9641f6549. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1083.426554] env[65758]: DEBUG oslo_concurrency.lockutils [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Acquiring lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.426744] env[65758]: DEBUG oslo_concurrency.lockutils [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Acquired lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.426821] env[65758]: DEBUG nova.network.neutron [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Refreshing network info cache for port 05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1083.568522] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661154, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.607187] env[65758]: WARNING neutronclient.v2_0.client [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1083.607856] env[65758]: WARNING neutronclient.v2_0.client [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1083.620928] env[65758]: DEBUG nova.compute.manager [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1083.622175] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda38082-a0ea-434a-87d0-98be0e1219f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.633612] env[65758]: DEBUG oslo_concurrency.lockutils [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.633612] env[65758]: DEBUG oslo_concurrency.lockutils [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.633612] env[65758]: DEBUG nova.compute.manager [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1083.637099] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2382642f-4b03-412c-bc85-9526cef68970 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.641269] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.654605] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661155, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.657210] env[65758]: DEBUG nova.network.neutron [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Port 2e41907c-1553-48df-9644-cb422d2f19df binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 1083.658652] env[65758]: DEBUG nova.compute.manager [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1083.659245] env[65758]: DEBUG nova.objects.instance [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'flavor' on Instance uuid 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.782525] env[65758]: DEBUG oslo_vmware.api [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661153, 'name': PowerOffVM_Task, 'duration_secs': 0.729596} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.786154] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1083.786368] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1083.786896] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41f19681-56a7-4f27-89c5-269e94fc4518 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.800589] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806020ec-c853-4e8a-bfae-b47e0d626430 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.809582] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45887ad8-9fb5-4871-9643-0466b4683781 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.852833] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6281a1c9-024c-4f4c-af83-356528fef31e {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.863657] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3096c8ad-c4f7-402d-abe2-833181bc72cf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.881066] env[65758]: DEBUG nova.compute.provider_tree [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.884446] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1083.884446] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1083.884446] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleting the datastore file [datastore1] 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.884773] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a21c239-92c9-4a0b-9147-ac8b393a11c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.895774] env[65758]: DEBUG oslo_vmware.api [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1083.895774] env[65758]: value = "task-4661157" [ 1083.895774] env[65758]: _type = "Task" [ 1083.895774] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.908432] env[65758]: DEBUG oslo_vmware.api [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661157, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.929968] env[65758]: WARNING neutronclient.v2_0.client [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1083.930777] env[65758]: WARNING openstack [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1083.931174] env[65758]: WARNING openstack [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1083.939526] env[65758]: WARNING neutronclient.v2_0.client [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1083.940158] env[65758]: WARNING openstack [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1083.940511] env[65758]: WARNING openstack [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1084.071594] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661154, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.148834] env[65758]: INFO nova.compute.manager [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] instance snapshotting [ 1084.154343] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7538690d-1a04-4748-bb44-4b3b1a879db8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.174023] env[65758]: DEBUG oslo_vmware.api [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661155, 'name': PowerOnVM_Task, 'duration_secs': 0.828166} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.177208] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1084.177463] env[65758]: INFO nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Took 8.86 seconds to spawn the instance on the hypervisor. [ 1084.177649] env[65758]: DEBUG nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1084.178883] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd11e19-47f7-4e96-9ff7-26c3704bdbdd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.202604] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad595a7-78d8-4cd8-b95b-3768b1f43b4d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.334631] env[65758]: WARNING neutronclient.v2_0.client [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1084.336065] env[65758]: WARNING openstack [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1084.336569] env[65758]: WARNING openstack [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1084.386534] env[65758]: DEBUG nova.scheduler.client.report [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1084.394092] env[65758]: WARNING neutronclient.v2_0.client [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1084.395222] env[65758]: WARNING openstack [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1084.395889] env[65758]: WARNING openstack [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1084.426297] env[65758]: DEBUG oslo_vmware.api [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661157, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231175} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.426376] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1084.426557] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1084.426775] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1084.427015] env[65758]: INFO nova.compute.manager [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1084.427321] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1084.427590] env[65758]: DEBUG nova.compute.manager [-] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1084.427738] env[65758]: DEBUG nova.network.neutron [-] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1084.427936] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1084.428497] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1084.428801] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1084.575337] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661154, 'name': CreateVM_Task, 'duration_secs': 1.449046} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.575582] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1084.576122] env[65758]: WARNING neutronclient.v2_0.client [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1084.576860] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910036', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'name': 'volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '149655f8-fcf5-4cfe-ab96-1171b9d3b550', 'attached_at': '', 'detached_at': '', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'serial': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1'}, 'attachment_id': '66e06035-1f24-44a0-a832-83737abdde25', 'disk_bus': None, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=65758) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1084.577087] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Root volume attach. 
Driver type: vmdk {{(pid=65758) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1084.577975] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1770c715-1043-421e-b31e-6889e2757090 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.588580] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6c0e40-23a9-4838-802d-1c7a45c52124 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.597107] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cca1328-6ad5-4399-8894-aa8985b053fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.605923] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-8ecb10a3-6d37-4775-aaca-88a187ea2cab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.614692] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1084.614692] env[65758]: value = "task-4661158" [ 1084.614692] env[65758]: _type = "Task" [ 1084.614692] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.631445] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661158, 'name': RelocateVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.639031] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
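Editor's note: the wait_for_task / _poll_task lines around here (e.g. task-4661158 'RelocateVM_Task' reported at 5% and later completed) show oslo.vmware polling a vCenter task until it finishes. The sketch below is a conceptual stand-in for that loop, not oslo.vmware's internals; every name in it is hypothetical.

```python
# Conceptual poll-until-done loop matching the "progress is N%" /
# "completed successfully" sequence in the log. Illustrative only.
import time


def wait_for_vcenter_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or errors."""
    while True:
        info = get_task_info()  # returns a TaskInfo-like dict
        state = info["state"]
        if state == "running":
            # Corresponds to the "Task: {...} progress is N%." DEBUG lines.
            time.sleep(poll_interval)
        elif state == "success":
            return info.get("result")
        else:  # 'error'
            raise RuntimeError(info.get("error", "task failed"))
```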
[ 1084.691971] env[65758]: DEBUG nova.network.neutron [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [{"id": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "address": "fa:16:3e:01:98:57", "network": {"id": "ae597de5-fdad-4dbc-8b33-969103bac012", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-856542556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8be788d761114dfca7244f953b571c7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c394c9-9b", "ovs_interfaceid": "83c394c9-9b0d-40ad-923c-00e70d63c85a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1084.693991] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1084.700284] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03cd3b50-1040-4eb9-857d-58e7daad62da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.711602] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.712342] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.712919] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.716454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.716687] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.716945] env[65758]: DEBUG nova.network.neutron [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1084.733733] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1084.734726] env[65758]: INFO nova.compute.manager [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Took 17.37 seconds to build instance. [ 1084.736769] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0cf322e6-5df9-4b46-9051-a7ddc06666f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.743029] env[65758]: DEBUG oslo_vmware.api [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1084.743029] env[65758]: value = "task-4661159" [ 1084.743029] env[65758]: _type = "Task" [ 1084.743029] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.753505] env[65758]: DEBUG nova.network.neutron [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updated VIF entry in instance network info cache for port 05e0fa46-1b67-477a-bc40-26c9641f6549. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1084.754066] env[65758]: DEBUG nova.network.neutron [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance_info_cache with network_info: [{"id": "05e0fa46-1b67-477a-bc40-26c9641f6549", "address": "fa:16:3e:6d:7e:f5", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05e0fa46-1b", "ovs_interfaceid": "05e0fa46-1b67-477a-bc40-26c9641f6549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1084.759026] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1084.759026] env[65758]: value = "task-4661160" [ 1084.759026] env[65758]: _type = "Task" [ 1084.759026] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.770900] env[65758]: DEBUG oslo_vmware.api [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.780932] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661160, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.853623] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.853910] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.893681] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.876s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.896177] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.688s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.127033] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661158, 'name': RelocateVM_Task, 'duration_secs': 0.477292} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.127033] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1085.127033] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910036', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'name': 'volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '149655f8-fcf5-4cfe-ab96-1171b9d3b550', 'attached_at': '', 'detached_at': '', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'serial': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1085.127721] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84370da1-1751-449d-91d9-dc92b0bbe4f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.146906] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f900fb-e99b-4900-b8d2-c5921a9f3926 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.169337] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1/volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.169763] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85612b01-fece-4b06-91a9-19006ce51b65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.191768] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1085.191768] env[65758]: value = "task-4661161" [ 1085.191768] env[65758]: _type = "Task" [ 1085.191768] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.203038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Releasing lock "refresh_cache-63b744d2-541a-42e3-9717-b06a4459fd50" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.203038] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4483270e-a3c7-4a83-a218-8e2d413f8cc5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.215901] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.216286] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Resuming the VM {{(pid=65758) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1085.219026] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d41fc1c-92e1-4fb1-90da-ca9954a6ec5d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.229799] env[65758]: DEBUG oslo_vmware.api [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1085.229799] env[65758]: value = "task-4661162" [ 1085.229799] env[65758]: _type = "Task" [ 1085.229799] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.233303] env[65758]: WARNING neutronclient.v2_0.client [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
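Editor's note: the scheduler report client earlier logged the inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 (VCPU total=48, allocation_ratio=4.0; MEMORY_MB total=196590, reserved=512; DISK_GB total=200). A short sketch of how that record translates into schedulable capacity, assuming the usual placement formula capacity = (total - reserved) * allocation_ratio; the helper itself is illustrative, only the values come from the log.

```python
# Effective capacity from the inventory record logged above (illustrative).
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
}


def effective_capacity(inv):
    # Assumed formula: (total - reserved) * allocation_ratio per resource class.
    return {
        rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
        for rc, v in inv.items()
    }


print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}
```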
[ 1085.235902] env[65758]: WARNING openstack [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1085.235902] env[65758]: WARNING openstack [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1085.244249] env[65758]: DEBUG oslo_concurrency.lockutils [None req-03bdb5b4-cc5e-403c-9cf4-9a52ebb45dd2 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.888s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.252929] env[65758]: DEBUG oslo_vmware.api [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661162, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.260338] env[65758]: DEBUG oslo_concurrency.lockutils [req-e74caee4-ad78-4aa4-9794-2c88d8926d39 req-3400bb3e-55d3-4e30-b977-f88b9ad15f70 service nova] Releasing lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.260810] env[65758]: DEBUG oslo_vmware.api [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661159, 'name': PowerOffVM_Task, 'duration_secs': 0.256709} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.264447] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1085.264655] env[65758]: DEBUG nova.compute.manager [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1085.266389] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d629732-4ff8-4f39-91e7-479167d69270 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.283878] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661160, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.357890] env[65758]: DEBUG nova.compute.utils [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1085.418025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4901f2cf-4026-4632-b63a-f55735744b2d tempest-ServerActionsV293TestJSON-1832507352 tempest-ServerActionsV293TestJSON-1832507352-project-member] Lock "0ce11868-fee2-40d3-9433-7bc398a1f756" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.238s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.447523] env[65758]: DEBUG nova.compute.manager [req-ee6b9d88-107e-4353-8f7e-0c745136ded7 req-a9bbe753-de2d-4a46-8122-4f2da7802ed9 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Received event network-vif-deleted-216bffab-4451-407d-b8dd-9e8687a90b81 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1085.447925] env[65758]: INFO nova.compute.manager [req-ee6b9d88-107e-4353-8f7e-0c745136ded7 req-a9bbe753-de2d-4a46-8122-4f2da7802ed9 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Neutron deleted interface 216bffab-4451-407d-b8dd-9e8687a90b81; detaching it from the instance and deleting it from the info cache [ 1085.448049] env[65758]: DEBUG nova.network.neutron [req-ee6b9d88-107e-4353-8f7e-0c745136ded7 req-a9bbe753-de2d-4a46-8122-4f2da7802ed9 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1085.483216] env[65758]: WARNING neutronclient.v2_0.client [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, 
please use that as this will be removed in a future release. [ 1085.484044] env[65758]: WARNING openstack [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1085.484353] env[65758]: WARNING openstack [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1085.615125] env[65758]: DEBUG nova.network.neutron [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [{"id": "2adc4687-14f6-4742-8afd-a86473befd61", "address": "fa:16:3e:63:9e:d9", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc4687-14", "ovs_interfaceid": "2adc4687-14f6-4742-8afd-a86473befd61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1085.632294] env[65758]: DEBUG nova.compute.manager [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received event network-vif-plugged-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1085.632485] env[65758]: DEBUG oslo_concurrency.lockutils [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.632824] env[65758]: DEBUG oslo_concurrency.lockutils [req-67fdc710-8eea-46ac-8d48-452182de94c8 
req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.633170] env[65758]: DEBUG oslo_concurrency.lockutils [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.633431] env[65758]: DEBUG nova.compute.manager [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] No waiting events found dispatching network-vif-plugged-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1085.633547] env[65758]: WARNING nova.compute.manager [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received unexpected event network-vif-plugged-2adc4687-14f6-4742-8afd-a86473befd61 for instance with vm_state shelved_offloaded and task_state spawning. [ 1085.633712] env[65758]: DEBUG nova.compute.manager [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received event network-changed-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1085.633869] env[65758]: DEBUG nova.compute.manager [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Refreshing instance network info cache due to event network-changed-2adc4687-14f6-4742-8afd-a86473befd61. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1085.634055] env[65758]: DEBUG oslo_concurrency.lockutils [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Acquiring lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.707980] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661161, 'name': ReconfigVM_Task, 'duration_secs': 0.356021} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.708777] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Reconfigured VM instance instance-00000064 to attach disk [datastore2] volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1/volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.714520] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-187a5559-8a12-4f55-9c05-644524b27521 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.729364] env[65758]: DEBUG nova.network.neutron [-] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1085.736976] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1085.736976] env[65758]: value = "task-4661163" [ 1085.736976] env[65758]: _type = "Task" [ 1085.736976] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.741316] env[65758]: DEBUG oslo_vmware.api [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661162, 'name': PowerOnVM_Task} progress is 93%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.746457] env[65758]: WARNING neutronclient.v2_0.client [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1085.756316] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661163, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.777545] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661160, 'name': CreateSnapshot_Task, 'duration_secs': 0.672525} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.777805] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1085.778901] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c327dcd-7f23-4d80-87bd-4b30e686008b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.788312] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.788506] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.788675] env[65758]: DEBUG nova.network.neutron [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1085.794803] env[65758]: DEBUG oslo_concurrency.lockutils [None req-409d6794-5fa1-47ad-9df0-f3bee01fe3ee tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.161s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.861555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.914913] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Applying migration context for instance 76ec31e6-65c2-4290-9ec0-b274be95baa4 as it has an incoming, in-progress migration c2b17f22-6bec-4cfa-bbde-36c745a9c6b9. 
Migration status is post-migrating {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1085.916913] env[65758]: INFO nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating resource usage from migration c2b17f22-6bec-4cfa-bbde-36c745a9c6b9 [ 1085.950207] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 37aadd44-79e8-4479-862f-265549c9d802 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.950433] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ec1e2845-e73a-40ff-9b6c-1d8281859fba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.950542] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 974d06c1-2704-4a78-bbd7-f54335c4288e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.950869] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.950869] env[65758]: WARNING nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 5fc4f1b8-9024-4155-b56d-56a8d08f0259 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1085.951062] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ade1d760-e3e7-49c8-ba9d-b4829ca60841 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.951062] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 31b7d1ee-58c1-47f3-a862-0bc5cb17addc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.951224] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a014debf-2f16-4b30-af78-27a6751060de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.951363] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 63b744d2-541a-42e3-9717-b06a4459fd50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.951495] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.952589] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 47cebd84-f9a1-4997-96aa-c76c5faa8c81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.952589] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance afc1eb16-c275-4b3b-a7fe-9938d2241e24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.952589] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Migration c2b17f22-6bec-4cfa-bbde-36c745a9c6b9 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 1085.952589] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 76ec31e6-65c2-4290-9ec0-b274be95baa4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.952589] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 149655f8-fcf5-4cfe-ab96-1171b9d3b550 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1085.952589] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1085.952953] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3264MB phys_disk=100GB used_disk=13GB total_vcpus=48 used_vcpus=14 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '13', 'num_vm_active': '8', 'num_task_None': '6', 'num_os_type_None': '13', 'num_proj_e114eef3998848699a9a086fee86db29': '3', 'io_workload': '1', 'num_proj_693b129cd84f4eee9971e7221e92c3e0': '3', 'num_vm_rescued': '1', 'num_task_deleting': '1', 'num_vm_suspended': '1', 'num_task_resuming': '1', 'num_proj_8be788d761114dfca7244f953b571c7d': '1', 'num_vm_stopped': '1', 'num_task_resize_migrated': '1', 'num_proj_4095654557a34bb0907071aedb3bb678': '1', 'num_vm_shelved_offloaded': '1', 'num_task_spawning': '2', 'num_proj_e2440f1694fe4b87a9827f6653ff2e4c': '1', 'num_proj_16188c7bd36d4b0eaffdc980b71ac727': '1', 'num_task_image_pending_upload': '1', 'num_proj_02d1056adfc646858ba42771ad01c221': '1', 'num_task_powering-off': '1', 'num_proj_bad3e3c7054c424a800cb12e9c5dbb31': '1', 'num_proj_c4c2ab2b80c04c38bfb4c7cafac87fe6': '1', 'num_vm_building': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1085.956615] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae28195d-e129-41b4-9d1b-7ea5ceb4f447 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.972209] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3f7e00-7722-4160-94fe-2ea162241ed0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.018201] env[65758]: DEBUG nova.compute.manager [req-ee6b9d88-107e-4353-8f7e-0c745136ded7 req-a9bbe753-de2d-4a46-8122-4f2da7802ed9 service nova] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Detach interface failed, port_id=216bffab-4451-407d-b8dd-9e8687a90b81, reason: Instance 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1086.119661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.122893] env[65758]: DEBUG oslo_concurrency.lockutils [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Acquired lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.123147] env[65758]: DEBUG nova.network.neutron [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Refreshing network info cache for port 2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1086.158314] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='cdcbb70ec1ffddd1a8f62690a73b711e',container_format='bare',created_at=2025-11-21T13:21:38Z,direct_url=,disk_format='vmdk',id=2edcb03c-85ab-4d21-8ff4-b3b47fae6985,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2033835035-shelved',owner='e2440f1694fe4b87a9827f6653ff2e4c',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-11-21T13:21:53Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1086.158458] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1086.158631] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1086.159114] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1086.159114] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1086.159198] 
env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1086.159393] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1086.159563] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1086.159725] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1086.159886] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1086.160076] env[65758]: DEBUG nova.virt.hardware [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1086.160971] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75815f28-e280-4f98-a0de-76583bd0d664 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.176086] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8d5666-33ca-4e34-b53c-ba74e7b5b16c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.194916] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:9e:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2adc4687-14f6-4742-8afd-a86473befd61', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1086.203419] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c 
tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1086.206762] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1086.207706] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71bdeadf-9741-456e-bca3-f5e6ade914b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.232800] env[65758]: INFO nova.compute.manager [-] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Took 1.80 seconds to deallocate network for instance. [ 1086.233007] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1086.233007] env[65758]: value = "task-4661164" [ 1086.233007] env[65758]: _type = "Task" [ 1086.233007] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.244708] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831c792f-756c-4827-9909-f5480623f850 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.257923] env[65758]: DEBUG oslo_vmware.api [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661162, 'name': PowerOnVM_Task, 'duration_secs': 0.763461} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.265811] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Resumed the VM {{(pid=65758) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1086.265811] env[65758]: DEBUG nova.compute.manager [None req-591bea02-b294-4de9-b542-95bed71baf82 tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1086.265811] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661164, 'name': CreateVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.266855] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801a17e0-05ba-4ce5-9a11-ed9953019cea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.271323] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca97635-774d-49e6-b766-a9cdee33fb3f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.278290] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661163, 'name': ReconfigVM_Task, 'duration_secs': 0.161403} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.279244] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910036', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'name': 'volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '149655f8-fcf5-4cfe-ab96-1171b9d3b550', 'attached_at': '', 'detached_at': '', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'serial': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1086.279791] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-603238b3-0e84-4b0f-ac3a-dfd979968235 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.317935] env[65758]: WARNING neutronclient.v2_0.client [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1086.317935] env[65758]: WARNING openstack [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1086.317935] env[65758]: WARNING openstack [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1086.333273] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1086.334011] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1086.334011] env[65758]: value = "task-4661165" [ 1086.334011] env[65758]: _type = "Task" [ 1086.334011] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.335439] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ea5389db-90f0-40ca-83c1-a03196af7093 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.339196] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e415e512-44e2-411c-9bd1-6eba230b0d31 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.359501] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63af3fb-05b3-48d8-970f-ce83fdfecdaf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.362574] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661165, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.363315] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1086.363315] env[65758]: value = "task-4661166" [ 1086.363315] env[65758]: _type = "Task" [ 1086.363315] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.376311] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.387567] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661166, 'name': CloneVM_Task} progress is 11%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.558700] env[65758]: WARNING neutronclient.v2_0.client [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1086.559631] env[65758]: WARNING openstack [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1086.560147] env[65758]: WARNING openstack [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1086.626796] env[65758]: WARNING neutronclient.v2_0.client [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1086.627710] env[65758]: WARNING openstack [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1086.628072] env[65758]: WARNING openstack [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1086.714181] env[65758]: DEBUG nova.network.neutron [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [{"id": "2e41907c-1553-48df-9644-cb422d2f19df", "address": "fa:16:3e:b2:e3:b9", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e41907c-15", "ovs_interfaceid": "2e41907c-1553-48df-9644-cb422d2f19df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1086.751109] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661164, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.752482] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.856054] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661165, 'name': Rename_Task, 'duration_secs': 0.505208} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.856054] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.856054] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27298a1e-3321-4619-8367-0661c87b811c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.864393] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1086.864393] env[65758]: value = "task-4661167" [ 1086.864393] env[65758]: _type = "Task" [ 1086.864393] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.878478] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661167, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.883176] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1086.886946] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661166, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.962831] env[65758]: WARNING neutronclient.v2_0.client [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1086.963882] env[65758]: WARNING openstack [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1086.963967] env[65758]: WARNING openstack [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1087.060058] env[65758]: DEBUG nova.network.neutron [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updated VIF entry in instance network info cache for port 2adc4687-14f6-4742-8afd-a86473befd61. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1087.060459] env[65758]: DEBUG nova.network.neutron [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [{"id": "2adc4687-14f6-4742-8afd-a86473befd61", "address": "fa:16:3e:63:9e:d9", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adc4687-14", "ovs_interfaceid": "2adc4687-14f6-4742-8afd-a86473befd61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1087.078963] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.079279] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.079494] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.079929] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.080148] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.082621] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.082737] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.082893] env[65758]: INFO nova.compute.manager [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Attaching volume da1ad087-09d2-4369-aa55-0371cd8a59fe to /dev/sdb [ 1087.088261] env[65758]: INFO nova.compute.manager [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Terminating instance [ 1087.131920] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82588ccc-378e-45d0-9d78-6aeb60e854e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.148657] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c40f76-5f75-42b4-99ad-0c4fee0e1e5b {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.164590] env[65758]: DEBUG nova.virt.block_device [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updating existing volume attachment record: 54a678bb-c7ac-4808-98bc-78e50761f99c {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1087.217649] env[65758]: DEBUG oslo_concurrency.lockutils [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.251436] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661164, 'name': CreateVM_Task, 'duration_secs': 0.618651} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.251640] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1087.252232] env[65758]: WARNING neutronclient.v2_0.client [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1087.252623] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.252783] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.253180] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1087.253546] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd2d51ce-c509-4cbe-bf38-1adf6e61ecfc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.259360] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1087.259360] env[65758]: value = 
"session[52f282ba-8d16-d852-9890-43f0b19795c3]52fd8f4b-d5c2-78cc-2cc1-3f08f44e68b0" [ 1087.259360] env[65758]: _type = "Task" [ 1087.259360] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.269879] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52fd8f4b-d5c2-78cc-2cc1-3f08f44e68b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.378448] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661167, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.382014] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661166, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.389102] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1087.389422] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.493s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.389769] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.749s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.390108] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.393343] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.640s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.393343] env[65758]: DEBUG nova.objects.instance [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 
tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lazy-loading 'resources' on Instance uuid 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1087.417915] env[65758]: INFO nova.scheduler.client.report [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleted allocations for instance 5fc4f1b8-9024-4155-b56d-56a8d08f0259 [ 1087.498540] env[65758]: DEBUG nova.compute.manager [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Received event network-changed-67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1087.498839] env[65758]: DEBUG nova.compute.manager [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Refreshing instance network info cache due to event network-changed-67e62b92-0851-4648-b7d7-181b274c8325. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1087.499178] env[65758]: DEBUG oslo_concurrency.lockutils [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] Acquiring lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.499386] env[65758]: DEBUG oslo_concurrency.lockutils [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] Acquired lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.499629] env[65758]: DEBUG nova.network.neutron [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Refreshing network info cache for port 67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1087.563532] env[65758]: DEBUG oslo_concurrency.lockutils [req-67fdc710-8eea-46ac-8d48-452182de94c8 req-63aefa25-8b11-468e-b520-548907ded2d7 service nova] Releasing lock "refresh_cache-afc1eb16-c275-4b3b-a7fe-9938d2241e24" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.593523] env[65758]: DEBUG nova.compute.manager [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1087.593897] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1087.595317] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8add0b69-29c6-458e-90a0-7309ac33840f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.607169] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1087.607567] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82355dbc-d5c1-4786-b24c-d7cb76258585 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.705244] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.705499] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.705724] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleting the datastore file [datastore1] 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.706324] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a46a644-545b-44ba-93cd-d13a49c97e1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.715200] env[65758]: DEBUG oslo_vmware.api [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1087.715200] env[65758]: value = "task-4661170" [ 1087.715200] env[65758]: _type = "Task" [ 1087.715200] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.727695] env[65758]: DEBUG oslo_vmware.api [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661170, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.744669] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7894da-cfeb-4a9a-b936-1159ea1b449c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.771089] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd63f6f5-7086-4073-8ea4-438ad81ef5d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.785770] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance '76ec31e6-65c2-4290-9ec0-b274be95baa4' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1087.791682] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.791682] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Processing image 2edcb03c-85ab-4d21-8ff4-b3b47fae6985 {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1087.791682] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985/2edcb03c-85ab-4d21-8ff4-b3b47fae6985.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.791682] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985/2edcb03c-85ab-4d21-8ff4-b3b47fae6985.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.791682] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.791682] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d79f141-69f2-4626-9b79-14e19c762ba4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.812687] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c 
tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.812979] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1087.813833] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e64a77db-d6f6-4679-a6c4-888eebd38e41 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.820353] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1087.820353] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5254b24f-1ca0-b828-f77c-aba5d62e52ee" [ 1087.820353] env[65758]: _type = "Task" [ 1087.820353] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.830227] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5254b24f-1ca0-b828-f77c-aba5d62e52ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.883347] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661166, 'name': CloneVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.887148] env[65758]: DEBUG oslo_vmware.api [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661167, 'name': PowerOnVM_Task, 'duration_secs': 0.712892} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.887389] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.887612] env[65758]: INFO nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Took 5.89 seconds to spawn the instance on the hypervisor. 
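The entries above follow the usual vCenter task pattern: a method such as FileManager.DeleteDatastoreFile_Task or HostDatastoreBrowser.SearchDatastore_Task is invoked, a Task moref comes back immediately, and the "Waiting for the task ... to complete" / "progress is N%" lines are oslo.vmware polling that task until it finishes. A minimal sketch of that invoke-and-poll pattern (not Nova's code; the vCenter endpoint, credentials, datastore path and datacenter moref are placeholders):

    from oslo_vmware import api as vmware_api


    def delete_datastore_file(ds_path, dc_ref):
        # Placeholder vCenter endpoint and credentials; a real deployment
        # reads these from the [vmware] section of nova.conf.
        session = vmware_api.VMwareAPISession(
            'vc.example.test', 'user', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        # FileManager.DeleteDatastoreFile_Task returns a Task moref at once.
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task',
            session.vim.service_content.fileManager,
            name=ds_path, datacenter=dc_ref)
        # wait_for_task polls the task every task_poll_interval seconds
        # (the "progress is N%" lines) and raises if it ends in error.
        return session.wait_for_task(task)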
[ 1087.887806] env[65758]: DEBUG nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1087.889539] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323851d6-1239-40ad-8528-08130c8f86c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.929881] env[65758]: DEBUG oslo_concurrency.lockutils [None req-738f55ba-79b2-4fc6-8f96-4672e6a8c0fd tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "5fc4f1b8-9024-4155-b56d-56a8d08f0259" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.308s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.003477] env[65758]: WARNING neutronclient.v2_0.client [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1088.004675] env[65758]: WARNING openstack [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1088.005349] env[65758]: WARNING openstack [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1088.119820] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a2282c-45ee-4edd-bf1f-d476c1573a47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.128506] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bf9ef8-cef3-482a-9e6c-ff3054c2a7ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.167914] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f29907a-220c-46f1-8ff7-e7dee365ad28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.176716] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4274a1f-9024-407a-98e4-513e4f6ed46c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.191202] env[65758]: DEBUG nova.compute.provider_tree [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Updating inventory 
in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1088.225737] env[65758]: DEBUG oslo_vmware.api [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159369} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.227115] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.227115] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1088.227115] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1088.227115] env[65758]: INFO nova.compute.manager [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1088.227115] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1088.228048] env[65758]: WARNING neutronclient.v2_0.client [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
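After the datastore contents are deleted, the manager hands network teardown to _deallocate_network_with_retries, and the "Waiting for function ... to return" line above is oslo.service's RetryDecorator re-running the wrapped callable after transient failures. A rough sketch of that retry pattern, with illustrative retry parameters (not Nova's actual values) and a placeholder teardown function:

    from oslo_service import loopingcall


    def release_ports():
        """Placeholder for the real Neutron port teardown call."""
        pass


    # Illustrative parameters: retry up to 3 times on ConnectionError,
    # sleeping 2s more each attempt, capped at 10s.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(ConnectionError,))
    def _deallocate_network_with_retries():
        # A listed exception triggers another attempt after the sleep;
        # anything else propagates immediately.
        release_ports()


    _deallocate_network_with_retries()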
[ 1088.228721] env[65758]: WARNING openstack [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1088.229092] env[65758]: WARNING openstack [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1088.237912] env[65758]: DEBUG nova.compute.manager [-] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1088.237912] env[65758]: DEBUG nova.network.neutron [-] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1088.237912] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1088.238106] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1088.238363] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1088.275414] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
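The repeated neutronclient warnings above recommend moving to the OpenStack SDK; for the "Deallocating network for instance" step this would roughly amount to listing and deleting the instance's ports through the SDK instead of python-neutronclient. A minimal, hypothetical illustration of that SDK call surface (the clouds.yaml entry name is a placeholder; the UUID is the instance being deleted above):

    import openstack

    conn = openstack.connect(cloud='devstack')  # placeholder cloud name
    # Neutron ports are bound to the instance via device_id.
    for port in conn.network.ports(
            device_id='6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc'):
        conn.network.delete_port(port, ignore_missing=True)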
[ 1088.293939] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-93df5e6e-1ce1-4fcd-8547-e1e2739d6db5 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance '76ec31e6-65c2-4290-9ec0-b274be95baa4' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1088.332791] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Preparing fetch location {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1088.333222] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Fetch image to [datastore1] OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8/OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8.vmdk {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1088.333552] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Downloading stream optimized image 2edcb03c-85ab-4d21-8ff4-b3b47fae6985 to [datastore1] OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8/OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8.vmdk on the data store datastore1 as vApp {{(pid=65758) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1088.333901] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Downloading image file data 2edcb03c-85ab-4d21-8ff4-b3b47fae6985 to the ESX as VM named 'OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8' {{(pid=65758) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1088.344924] env[65758]: DEBUG nova.network.neutron [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updated VIF entry in instance network info cache for port 67e62b92-0851-4648-b7d7-181b274c8325. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1088.345321] env[65758]: DEBUG nova.network.neutron [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance_info_cache with network_info: [{"id": "67e62b92-0851-4648-b7d7-181b274c8325", "address": "fa:16:3e:23:1f:98", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e62b92-08", "ovs_interfaceid": "67e62b92-0851-4648-b7d7-181b274c8325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1088.392895] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661166, 'name': CloneVM_Task, 'duration_secs': 1.593058} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.393210] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Created linked-clone VM from snapshot [ 1088.394088] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb293c16-1427-4f6f-a0f2-2d455c7eab67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.408526] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Uploading image b87c38de-e73b-49dc-a7dd-1e776ee516c5 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1088.413051] env[65758]: INFO nova.compute.manager [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Took 15.81 seconds to build instance. 
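The instance_info_cache entry above is the per-instance network_info structure Nova caches; the parts worth pulling out when debugging are the fixed and floating addresses, the tap device name, and the segmentation ID buried inside it. A small, self-contained example of reading those fields from an abridged copy of the entry above:

    # Abridged from the instance_info_cache entry logged above.
    vif = {
        "id": "67e62b92-0851-4648-b7d7-181b274c8325",
        "address": "fa:16:3e:23:1f:98",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.3",
                     "floating_ips": [{"address": "10.180.180.151"}]}],
        }]},
        "details": {"segmentation_id": 283},
        "devname": "tap67e62b92-08",
    }

    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"] for ip in subnet["ips"]]
    floating = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"] for fip in ip["floating_ips"]]
    # ['192.168.128.3'] ['10.180.180.151'] tap67e62b92-08 283
    print(fixed, floating, vif["devname"], vif["details"]["segmentation_id"])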
[ 1088.423417] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1088.423417] env[65758]: value = "resgroup-9" [ 1088.423417] env[65758]: _type = "ResourcePool" [ 1088.423417] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1088.423717] env[65758]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-4639c423-e781-40c9-95dc-aa4ba5bb8497 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.451314] env[65758]: DEBUG oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1088.451314] env[65758]: value = "vm-910048" [ 1088.451314] env[65758]: _type = "VirtualMachine" [ 1088.451314] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1088.451885] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-aaed8f32-2282-410a-a130-c71e013bc9eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.455186] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lease: (returnval){ [ 1088.455186] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5278b036-75d8-80e7-8f71-5a054717f320" [ 1088.455186] env[65758]: _type = "HttpNfcLease" [ 1088.455186] env[65758]: } obtained for vApp import into resource pool (val){ [ 1088.455186] env[65758]: value = "resgroup-9" [ 1088.455186] env[65758]: _type = "ResourcePool" [ 1088.455186] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1088.455533] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the lease: (returnval){ [ 1088.455533] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5278b036-75d8-80e7-8f71-5a054717f320" [ 1088.455533] env[65758]: _type = "HttpNfcLease" [ 1088.455533] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1088.462824] env[65758]: DEBUG oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lease: (returnval){ [ 1088.462824] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529bffed-976a-7b71-b6ef-0ecbc4bd28b9" [ 1088.462824] env[65758]: _type = "HttpNfcLease" [ 1088.462824] env[65758]: } obtained for exporting VM: (result){ [ 1088.462824] env[65758]: value = "vm-910048" [ 1088.462824] env[65758]: _type = "VirtualMachine" [ 1088.462824] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1088.463214] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the lease: (returnval){ [ 1088.463214] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529bffed-976a-7b71-b6ef-0ecbc4bd28b9" [ 1088.463214] env[65758]: _type = "HttpNfcLease" [ 1088.463214] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1088.464983] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1088.464983] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5278b036-75d8-80e7-8f71-5a054717f320" [ 1088.464983] env[65758]: _type = "HttpNfcLease" [ 1088.464983] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1088.473213] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1088.473213] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529bffed-976a-7b71-b6ef-0ecbc4bd28b9" [ 1088.473213] env[65758]: _type = "HttpNfcLease" [ 1088.473213] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1088.607933] env[65758]: DEBUG nova.compute.manager [req-84c711af-53fd-4e3e-94ae-d6785d3ce51e req-bf9edf10-5582-488b-b949-fd2083f83b0f service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Received event network-vif-deleted-4b5db70a-0308-435f-8c89-1a51d34af34e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1088.608106] env[65758]: INFO nova.compute.manager [req-84c711af-53fd-4e3e-94ae-d6785d3ce51e req-bf9edf10-5582-488b-b949-fd2083f83b0f service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Neutron deleted interface 4b5db70a-0308-435f-8c89-1a51d34af34e; detaching it from the instance and deleting it from the info cache [ 1088.608293] env[65758]: DEBUG nova.network.neutron [req-84c711af-53fd-4e3e-94ae-d6785d3ce51e req-bf9edf10-5582-488b-b949-fd2083f83b0f service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1088.720967] env[65758]: ERROR nova.scheduler.client.report [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [req-f4ff7bc0-516b-4bef-bdad-b797b1714f8a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f4ff7bc0-516b-4bef-bdad-b797b1714f8a"}]} [ 1088.739883] env[65758]: DEBUG nova.scheduler.client.report [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1088.755394] env[65758]: DEBUG nova.scheduler.client.report [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1088.755624] env[65758]: DEBUG nova.compute.provider_tree [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1088.769912] env[65758]: DEBUG nova.scheduler.client.report [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: None {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1088.791237] env[65758]: DEBUG nova.scheduler.client.report [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1088.849076] env[65758]: DEBUG oslo_concurrency.lockutils [req-289f31bb-6038-4c87-bf3a-59ff29607933 req-f17c354c-0cdf-4029-81f9-ad5398636eb5 service nova] Releasing lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.919641] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-7a248146-7e66-42b2-8422-0d8a87b3ca7b tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.327s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.965178] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1088.965178] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5278b036-75d8-80e7-8f71-5a054717f320" [ 1088.965178] env[65758]: _type = "HttpNfcLease" [ 1088.965178] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1088.974840] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1088.974840] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529bffed-976a-7b71-b6ef-0ecbc4bd28b9" [ 1088.974840] env[65758]: _type = "HttpNfcLease" [ 1088.974840] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1088.975349] env[65758]: DEBUG oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1088.975349] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529bffed-976a-7b71-b6ef-0ecbc4bd28b9" [ 1088.975349] env[65758]: _type = "HttpNfcLease" [ 1088.975349] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1088.975924] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bfb9b2-8ebe-416c-8412-e8d96a9cc581 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.984307] env[65758]: DEBUG oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526f1146-83f7-5fa2-d70b-078ae8121b53/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1088.984506] env[65758]: DEBUG oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526f1146-83f7-5fa2-d70b-078ae8121b53/disk-0.vmdk for reading. 
{{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1089.055105] env[65758]: DEBUG nova.network.neutron [-] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1089.098728] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f2652ddc-ee60-4977-91c0-373bf772ebab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.111955] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5f6a7ce-366d-429b-8297-933c5b4d18ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.125256] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87412ad2-3780-48bc-9a86-bfabae2b49e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.137702] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c18d9d5-96b1-4bbb-a04f-2cacc80b7d6d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.149512] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5ca76c-2781-4f7a-a921-9b2847f3ddb7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.172156] env[65758]: DEBUG nova.compute.manager [req-84c711af-53fd-4e3e-94ae-d6785d3ce51e req-bf9edf10-5582-488b-b949-fd2083f83b0f service nova] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Detach interface failed, port_id=4b5db70a-0308-435f-8c89-1a51d34af34e, reason: Instance 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1089.202066] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8512aa-251b-4d20-b5c3-9fc82a875578 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.211126] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4586caf-11d5-4d9e-bcda-2759492b569a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.226544] env[65758]: DEBUG nova.compute.provider_tree [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1089.466493] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1089.466493] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5278b036-75d8-80e7-8f71-5a054717f320" [ 1089.466493] env[65758]: _type = "HttpNfcLease" [ 1089.466493] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1089.467880] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1089.467880] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5278b036-75d8-80e7-8f71-5a054717f320" [ 1089.467880] env[65758]: _type = "HttpNfcLease" [ 1089.467880] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1089.467880] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95dca47e-765c-4c7c-8e07-837e3b313cc2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.476933] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52989937-eb76-bffe-20d4-3da51473ebd3/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1089.477209] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52989937-eb76-bffe-20d4-3da51473ebd3/disk-0.vmdk. 
{{(pid=65758) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1089.549156] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-213ae85f-b292-468b-bf86-9ac115f86a65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.562308] env[65758]: INFO nova.compute.manager [-] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Took 1.32 seconds to deallocate network for instance. [ 1089.791209] env[65758]: DEBUG nova.scheduler.client.report [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 129 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1089.792506] env[65758]: DEBUG nova.compute.provider_tree [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 129 to 130 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1089.792506] env[65758]: DEBUG nova.compute.provider_tree [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1090.071497] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.300640] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.908s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.303829] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 
tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.232s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.304287] env[65758]: DEBUG nova.objects.instance [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'resources' on Instance uuid 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.325830] env[65758]: INFO nova.scheduler.client.report [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleted allocations for instance 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49 [ 1090.648207] env[65758]: DEBUG nova.compute.manager [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Received event network-changed-b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1090.648513] env[65758]: DEBUG nova.compute.manager [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Refreshing instance network info cache due to event network-changed-b574c870-790b-4dad-8dce-58d93bb6fe44. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1090.648738] env[65758]: DEBUG oslo_concurrency.lockutils [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Acquiring lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.648988] env[65758]: DEBUG oslo_concurrency.lockutils [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Acquired lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.649179] env[65758]: DEBUG nova.network.neutron [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Refreshing network info cache for port b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1090.666253] env[65758]: WARNING neutronclient.v2_0.client [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
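The earlier 409 "placement.concurrent_update" error, followed by the inventory refresh and the generation bump from 129 to 130, is Placement's optimistic concurrency scheme: every inventory PUT must carry the provider generation the caller last saw, and a mismatch forces a re-read and retry. A rough sketch of that loop against the Placement HTTP API (endpoint and token are placeholders; the provider UUID is the one in this log):

    import requests

    PLACEMENT = 'http://placement.example.test/placement'   # placeholder
    HEADERS = {'X-Auth-Token': 'PLACEHOLDER_TOKEN',          # placeholder
               'OpenStack-API-Version': 'placement 1.26'}
    PROVIDER = '0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51'


    def set_inventories(inventories, attempts=3):
        url = f'{PLACEMENT}/resource_providers/{PROVIDER}/inventories'
        for _ in range(attempts):
            # Re-read to pick up the current provider generation.
            current = requests.get(url, headers=HEADERS)
            current.raise_for_status()
            gen = current.json()['resource_provider_generation']
            resp = requests.put(url, headers=HEADERS, json={
                'resource_provider_generation': gen,
                'inventories': inventories})
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation first, so loop and retry with the fresh value.
        raise RuntimeError('inventory update kept conflicting')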
[ 1090.667045] env[65758]: WARNING openstack [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1090.667537] env[65758]: WARNING openstack [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1090.751265] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Completed reading data from the image iterator. {{(pid=65758) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1090.751518] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52989937-eb76-bffe-20d4-3da51473ebd3/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1090.752716] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00441cea-0953-45e5-918b-97d15e288b5e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.761910] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52989937-eb76-bffe-20d4-3da51473ebd3/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1090.762303] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52989937-eb76-bffe-20d4-3da51473ebd3/disk-0.vmdk. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1090.762677] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-1d3078c9-75c6-4923-a885-97f4aa196014 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.843056] env[65758]: DEBUG oslo_concurrency.lockutils [None req-127a3bdb-8cac-4e36-a057-1f25a5396bf4 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.601s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.895489] env[65758]: WARNING neutronclient.v2_0.client [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1090.896963] env[65758]: WARNING openstack [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1090.897678] env[65758]: WARNING openstack [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1090.997551] env[65758]: DEBUG oslo_vmware.rw_handles [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52989937-eb76-bffe-20d4-3da51473ebd3/disk-0.vmdk. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1090.998361] env[65758]: INFO nova.virt.vmwareapi.images [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Downloaded image file data 2edcb03c-85ab-4d21-8ff4-b3b47fae6985 [ 1091.000093] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb4debc-02e4-4d09-aca8-dd202745aed4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.031808] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-588b5745-770b-410f-9e09-963a2dcd253e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.042529] env[65758]: DEBUG nova.network.neutron [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Updated VIF entry in instance network info cache for port b574c870-790b-4dad-8dce-58d93bb6fe44. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1091.043087] env[65758]: DEBUG nova.network.neutron [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Updating instance_info_cache with network_info: [{"id": "b574c870-790b-4dad-8dce-58d93bb6fe44", "address": "fa:16:3e:62:75:62", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb574c870-79", "ovs_interfaceid": "b574c870-790b-4dad-8dce-58d93bb6fe44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1091.065175] env[65758]: INFO nova.virt.vmwareapi.images [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] The imported VM was unregistered [ 1091.067576] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Caching image {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1091.067961] 
env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating directory with path [datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985 {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.074113] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79ef35b1-fe7a-489f-b899-c2c32461ec4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.077571] env[65758]: DEBUG nova.compute.manager [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1091.091589] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Created directory with path [datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985 {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.091800] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8/OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8.vmdk to [datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985/2edcb03c-85ab-4d21-8ff4-b3b47fae6985.vmdk. {{(pid=65758) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1091.095024] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ffb74c54-e70a-4e08-82d5-33cfd6348a58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.103644] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1091.103644] env[65758]: value = "task-4661175" [ 1091.103644] env[65758]: _type = "Task" [ 1091.103644] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.109363] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eec3b8-e7c3-47ee-a1fc-938b66f5eebb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.118457] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661175, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.124731] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2c5d67-64ee-4ee0-8079-92e1a76657aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.163586] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60be590a-acc1-41be-b0ff-b38f97ba7e32 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.170035] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756f3019-66fe-42e6-bdcd-ef06f3071a6d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.186219] env[65758]: DEBUG nova.compute.provider_tree [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.191894] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.192357] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.192678] env[65758]: DEBUG nova.compute.manager [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Going to confirm migration 6 {{(pid=65758) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 1091.200781] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "62ae50af-ff52-4084-8161-1a650eff5247" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.201804] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "62ae50af-ff52-4084-8161-1a650eff5247" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.546621] env[65758]: DEBUG 
oslo_concurrency.lockutils [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Releasing lock "refresh_cache-37aadd44-79e8-4479-862f-265549c9d802" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.547079] env[65758]: DEBUG nova.compute.manager [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Received event network-changed-05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1091.547204] env[65758]: DEBUG nova.compute.manager [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Refreshing instance network info cache due to event network-changed-05e0fa46-1b67-477a-bc40-26c9641f6549. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1091.547508] env[65758]: DEBUG oslo_concurrency.lockutils [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Acquiring lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.547732] env[65758]: DEBUG oslo_concurrency.lockutils [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Acquired lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.547966] env[65758]: DEBUG nova.network.neutron [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Refreshing network info cache for port 05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1091.601770] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.617948] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661175, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.689873] env[65758]: DEBUG nova.scheduler.client.report [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.699969] env[65758]: WARNING neutronclient.v2_0.client [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1091.703831] env[65758]: DEBUG nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1091.717598] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1091.717980] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910049', 'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe', 'name': 'volume-da1ad087-09d2-4369-aa55-0371cd8a59fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ade1d760-e3e7-49c8-ba9d-b4829ca60841', 'attached_at': '', 'detached_at': '', 'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe', 'serial': 'da1ad087-09d2-4369-aa55-0371cd8a59fe'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1091.718982] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae806002-d8e1-4df9-bb37-9847bb6f8d6c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.739529] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6a0113-c189-44aa-9407-d2297bc5954d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.768966] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] volume-da1ad087-09d2-4369-aa55-0371cd8a59fe/volume-da1ad087-09d2-4369-aa55-0371cd8a59fe.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.769873] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f39ea84-b938-4664-8716-1a1409172e5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.785526] env[65758]: WARNING neutronclient.v2_0.client [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
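The connection_info dictionary logged by _attach_volume_vmdk above carries everything the vmdk attach needs. A minimal, illustrative sketch (not Nova's code) of reading those fields back out, with the dict shape and values copied verbatim from the log entry above:

# Illustrative only: structure and values taken from the log entry above.
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-910049',  # vCenter managed object reference used to locate the volume backing
        'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe',
        'name': 'volume-da1ad087-09d2-4369-aa55-0371cd8a59fe',
        'access_mode': 'rw',
        'encrypted': False,
    },
}

def vmdk_attach_params(info):
    """Return (volume_moref, volume_id, read_only) from a 'vmdk' connection_info."""
    assert info['driver_volume_type'] == 'vmdk'
    data = info['data']
    return data['volume'], data['volume_id'], data.get('access_mode') == 'ro'

print(vmdk_attach_params(connection_info))
# -> ('vm-910049', 'da1ad087-09d2-4369-aa55-0371cd8a59fe', False)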
[ 1091.785867] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.786026] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.786231] env[65758]: DEBUG nova.network.neutron [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1091.786429] env[65758]: DEBUG nova.objects.instance [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'info_cache' on Instance uuid 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.797758] env[65758]: DEBUG oslo_vmware.api [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1091.797758] env[65758]: value = "task-4661177" [ 1091.797758] env[65758]: _type = "Task" [ 1091.797758] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.808035] env[65758]: DEBUG oslo_vmware.api [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661177, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.051672] env[65758]: WARNING neutronclient.v2_0.client [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
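The "Acquiring lock ... by ...", "acquired ... waited N s" and "released ... held N s" entries throughout this log are emitted by oslo.concurrency's lockutils: the lines pointing at lockutils.py "inner" come from the synchronized decorator, the ones pointing at "lock" come from the lock() context manager. A minimal sketch of both patterns, with lock names taken from the log and placeholder bodies (not Nova's actual critical sections):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def resize_claim():
    # critical section: only one caller per lock name runs at a time
    pass

def refresh_cache(instance_uuid):
    # explicit context-manager form, matching the per-instance "refresh_cache-<uuid>" locks
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass

resize_claim()
refresh_cache('76ec31e6-65c2-4290-9ec0-b274be95baa4')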
[ 1092.052624] env[65758]: WARNING openstack [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1092.053036] env[65758]: WARNING openstack [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1092.126440] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661175, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.196457] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.893s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.199714] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.598s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.224692] env[65758]: INFO nova.scheduler.client.report [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted allocations for instance 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc [ 1092.233583] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.286647] env[65758]: WARNING neutronclient.v2_0.client [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
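The repeated "Disabling service 'block-storage' ... no such option valid_interfaces in group [cinder]" warnings mean the SDK-side config processing asked oslo.config for an option that was never registered in that group, so the lookup raised NoSuchOptError and the service was skipped. A minimal oslo.config sketch (not the Nova/openstacksdk wiring; the default shown is illustrative) that reproduces and then resolves the error:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf([])  # parse an empty command line

conf.register_group(cfg.OptGroup('cinder'))
try:
    conf.cinder.valid_interfaces          # group exists, option was never registered
except cfg.NoSuchOptError as err:
    print(err)                            # no such option valid_interfaces in group [cinder]

# Registering the option makes the same lookup succeed.
conf.register_opts(
    [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
    group='cinder')
print(conf.cinder.valid_interfaces)       # ['internal', 'public']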
[ 1092.287462] env[65758]: WARNING openstack [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1092.288118] env[65758]: WARNING openstack [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1092.317485] env[65758]: DEBUG oslo_vmware.api [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661177, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.401300] env[65758]: DEBUG nova.network.neutron [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updated VIF entry in instance network info cache for port 05e0fa46-1b67-477a-bc40-26c9641f6549. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1092.401790] env[65758]: DEBUG nova.network.neutron [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance_info_cache with network_info: [{"id": "05e0fa46-1b67-477a-bc40-26c9641f6549", "address": "fa:16:3e:6d:7e:f5", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05e0fa46-1b", "ovs_interfaceid": "05e0fa46-1b67-477a-bc40-26c9641f6549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1092.603367] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "63b744d2-541a-42e3-9717-b06a4459fd50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.603751] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.604114] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.604449] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.604929] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.611067] env[65758]: INFO nova.compute.manager [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Terminating instance [ 1092.620131] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661175, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.705069] env[65758]: INFO nova.compute.claims [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.737027] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4c6c1848-18b8-42f1-8f68-ef9f66add2a4 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.658s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.803967] env[65758]: WARNING neutronclient.v2_0.client [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1092.805064] env[65758]: WARNING openstack [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1092.805195] env[65758]: WARNING openstack [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1092.824072] env[65758]: DEBUG oslo_vmware.api [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661177, 'name': ReconfigVM_Task, 'duration_secs': 0.613532} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.824072] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfigured VM instance instance-0000005f to attach disk [datastore1] volume-da1ad087-09d2-4369-aa55-0371cd8a59fe/volume-da1ad087-09d2-4369-aa55-0371cd8a59fe.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.829034] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbce4e9c-a2eb-4eee-ae17-c649d1f01935 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.851096] env[65758]: DEBUG oslo_vmware.api [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1092.851096] env[65758]: value = "task-4661178" [ 1092.851096] env[65758]: _type = "Task" [ 1092.851096] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.864203] env[65758]: DEBUG oslo_vmware.api [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661178, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.905977] env[65758]: DEBUG oslo_concurrency.lockutils [req-240d4165-c9e4-4f0b-b86e-842b5a409744 req-2d371589-ef9b-421f-9a37-df710668064d service nova] Releasing lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.067587] env[65758]: WARNING neutronclient.v2_0.client [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
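The "Waiting for the task: (returnval){ value = "task-..." }" and "Task: {...} progress is N%" entries are oslo.vmware polling a vCenter task until it completes. A minimal sketch of that invoke-then-wait pattern, using placeholder credentials and a power-off call rather than the ReconfigVM_Task above (the moref type is assumed):

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholders: host, username, password, retry count and poll interval are
# not the values used by this environment.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'password', 10, 0.5)

# Build a managed object reference and invoke a task-returning API on it.
vm_ref = vim_util.get_moref('vm-910049', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# wait_for_task() polls the task, logging "progress is N%" entries like the
# ones above, and raises if vCenter reports the task as failed.
session.wait_for_task(task)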
[ 1093.068717] env[65758]: WARNING openstack [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1093.069885] env[65758]: WARNING openstack [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1093.119052] env[65758]: DEBUG nova.compute.manager [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1093.119444] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1093.119829] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661175, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.120633] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d1082e-2343-4e90-b866-fe30189b81b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.129885] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1093.130323] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c1dd32d-59c1-45a4-95f2-853500f792f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.144298] env[65758]: DEBUG oslo_vmware.api [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1093.144298] env[65758]: value = "task-4661179" [ 1093.144298] env[65758]: _type = "Task" [ 1093.144298] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.155189] env[65758]: DEBUG oslo_vmware.api [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.189970] env[65758]: DEBUG nova.network.neutron [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [{"id": "2e41907c-1553-48df-9644-cb422d2f19df", "address": "fa:16:3e:b2:e3:b9", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e41907c-15", "ovs_interfaceid": "2e41907c-1553-48df-9644-cb422d2f19df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1093.211269] env[65758]: INFO nova.compute.resource_tracker [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating resource usage from migration 0d160ead-0c7c-422d-976f-8988ce7ea93d [ 1093.363417] env[65758]: DEBUG oslo_vmware.api [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661178, 'name': ReconfigVM_Task, 'duration_secs': 0.236063} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.363757] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910049', 'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe', 'name': 'volume-da1ad087-09d2-4369-aa55-0371cd8a59fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ade1d760-e3e7-49c8-ba9d-b4829ca60841', 'attached_at': '', 'detached_at': '', 'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe', 'serial': 'da1ad087-09d2-4369-aa55-0371cd8a59fe'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1093.438508] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707879ce-aa67-4660-9807-8a6435758ddb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.448140] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06e53e3-8243-4540-a1f9-d425e1a02605 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.482613] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7ed267-b56f-40d5-9734-b69607a6601c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.491067] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3e1a96-8ab5-48c6-913e-264c7d260358 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.506490] env[65758]: DEBUG nova.compute.provider_tree [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1093.536527] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "8f7c865d-1207-4300-b721-25b196f7a2f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.536695] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "8f7c865d-1207-4300-b721-25b196f7a2f9" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.618383] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661175, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.654707] env[65758]: DEBUG oslo_vmware.api [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661179, 'name': PowerOffVM_Task, 'duration_secs': 0.224829} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.655015] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1093.655316] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1093.655590] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fc56b7a-5bb5-495e-b1d9-5d7a5e9eab81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.693088] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.693415] env[65758]: DEBUG nova.objects.instance [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'migration_context' on Instance uuid 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.722611] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1093.722973] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1093.723180] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleting the datastore file [datastore2] 63b744d2-541a-42e3-9717-b06a4459fd50 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1093.723486] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1d34bf3-1205-4f26-bd41-2adf1a0be62b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.730354] env[65758]: DEBUG oslo_vmware.api [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for the task: (returnval){ [ 1093.730354] env[65758]: value = "task-4661181" [ 1093.730354] env[65758]: _type = "Task" [ 1093.730354] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.739518] env[65758]: DEBUG oslo_vmware.api [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661181, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.027892] env[65758]: ERROR nova.scheduler.client.report [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [req-ddebc9c3-591e-43ce-9293-18b48f10f90c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ddebc9c3-591e-43ce-9293-18b48f10f90c"}]} [ 1094.039207] env[65758]: DEBUG nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1094.046611] env[65758]: DEBUG nova.scheduler.client.report [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1094.061824] env[65758]: DEBUG nova.scheduler.client.report [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1094.062199] env[65758]: DEBUG nova.compute.provider_tree [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.074244] env[65758]: DEBUG nova.scheduler.client.report [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: None {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1094.093891] env[65758]: DEBUG nova.scheduler.client.report [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1094.120912] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661175, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.528315} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.121332] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8/OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8.vmdk to [datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985/2edcb03c-85ab-4d21-8ff4-b3b47fae6985.vmdk. [ 1094.121388] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Cleaning up location [datastore1] OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8 {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1094.121519] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a278f987-d1bd-47c9-8f76-41d08c1a36f8 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1094.122336] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6066935b-6bef-40cf-b609-0365ab6e2f7c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.128978] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1094.128978] env[65758]: value = "task-4661182" [ 1094.128978] env[65758]: _type = "Task" [ 1094.128978] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.141824] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661182, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.196814] env[65758]: DEBUG nova.objects.base [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Object Instance<76ec31e6-65c2-4290-9ec0-b274be95baa4> lazy-loaded attributes: info_cache,migration_context {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1094.198014] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61823129-27ab-44ab-855c-85cd1c0488ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.220936] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd2efff3-b2ee-4e35-a4c2-4efb3fd08a7a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.227827] env[65758]: DEBUG oslo_vmware.api [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1094.227827] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524547eb-4a95-501a-1c55-4e6c967016c9" [ 1094.227827] env[65758]: _type = "Task" [ 1094.227827] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.241511] env[65758]: DEBUG oslo_vmware.api [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524547eb-4a95-501a-1c55-4e6c967016c9, 'name': SearchDatastore_Task, 'duration_secs': 0.007663} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.241864] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.246442] env[65758]: DEBUG oslo_vmware.api [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Task: {'id': task-4661181, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142704} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.246748] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1094.246961] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1094.247179] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1094.247384] env[65758]: INFO nova.compute.manager [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1094.247648] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1094.248087] env[65758]: DEBUG nova.compute.manager [-] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1094.248087] env[65758]: DEBUG nova.network.neutron [-] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1094.248226] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1094.248795] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1094.249143] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1094.290899] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
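The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entry above comes from the retry machinery in oslo.service's loopingcall module. A minimal sketch of wrapping a flaky call with loopingcall.RetryDecorator; the exception type, retry counts and sleep times are placeholders, not Nova's actual settings:

from oslo_service import loopingcall

class TransientNetworkError(Exception):
    """Placeholder for the transient errors worth retrying."""

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10,
                            exceptions=(TransientNetworkError,))
def deallocate_network_with_retries():
    # Would unbind/delete the instance's ports; retried on the listed
    # exceptions and re-raised once max_retry_count is exhausted.
    return True

deallocate_network_with_retries()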
[ 1094.345465] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ac766e-9b6c-470c-bac3-065483b3aef9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.354892] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9562d81-4961-4522-8fb1-a368932e73a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.387608] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cf5cda-9947-42d5-b10c-0606e2d1a0e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.396443] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde55add-a6dc-4f95-b95f-596105c8d206 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.413363] env[65758]: DEBUG nova.objects.instance [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid ade1d760-e3e7-49c8-ba9d-b4829ca60841 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.414931] env[65758]: DEBUG nova.compute.provider_tree [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.541939] env[65758]: DEBUG nova.compute.manager [req-3dd15b8f-b854-4e57-bba5-f1a5e06e026c req-b5e3b1c8-458f-4336-b766-0be4503786db service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Received event network-vif-deleted-83c394c9-9b0d-40ad-923c-00e70d63c85a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1094.542123] env[65758]: INFO nova.compute.manager [req-3dd15b8f-b854-4e57-bba5-f1a5e06e026c req-b5e3b1c8-458f-4336-b766-0be4503786db service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Neutron deleted interface 83c394c9-9b0d-40ad-923c-00e70d63c85a; detaching it from the instance and deleting it from the info cache [ 1094.542372] env[65758]: DEBUG nova.network.neutron [req-3dd15b8f-b854-4e57-bba5-f1a5e06e026c req-b5e3b1c8-458f-4336-b766-0be4503786db service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1094.562772] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.640655] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043062} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.640919] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1094.641095] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985/2edcb03c-85ab-4d21-8ff4-b3b47fae6985.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.641348] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985/2edcb03c-85ab-4d21-8ff4-b3b47fae6985.vmdk to [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24/afc1eb16-c275-4b3b-a7fe-9938d2241e24.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1094.641607] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef9e7be0-c507-4fc3-b48a-4f7ebed94f34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.650354] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1094.650354] env[65758]: value = "task-4661183" [ 1094.650354] env[65758]: _type = "Task" [ 1094.650354] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.660762] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661183, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.919942] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d724101e-f860-43ea-b707-1b24520a9e01 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.837s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.954118] env[65758]: DEBUG nova.scheduler.client.report [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1094.954498] env[65758]: DEBUG nova.compute.provider_tree [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 134 to 135 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1094.954685] env[65758]: DEBUG nova.compute.provider_tree [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1095.015117] env[65758]: DEBUG nova.network.neutron [-] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1095.047511] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f95d070-06e4-43b7-a021-fe5b119a338f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.060568] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a62316-48ce-4986-916e-0d8166355eb3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.101209] env[65758]: DEBUG nova.compute.manager [req-3dd15b8f-b854-4e57-bba5-f1a5e06e026c req-b5e3b1c8-458f-4336-b766-0be4503786db service nova] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Detach interface failed, 
port_id=83c394c9-9b0d-40ad-923c-00e70d63c85a, reason: Instance 63b744d2-541a-42e3-9717-b06a4459fd50 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1095.166216] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661183, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.291802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.291912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.461149] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.261s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.461430] env[65758]: INFO nova.compute.manager [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Migrating [ 1095.469526] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.236s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.471467] env[65758]: INFO nova.compute.claims [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1095.518035] env[65758]: INFO nova.compute.manager [-] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Took 1.27 seconds to deallocate network for instance. [ 1095.667021] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661183, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.797526] env[65758]: DEBUG nova.compute.utils [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1095.985929] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.986417] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.986417] env[65758]: DEBUG nova.network.neutron [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1096.028255] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.165469] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661183, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.301491] env[65758]: DEBUG oslo_concurrency.lockutils [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.310505] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "974d06c1-2704-4a78-bbd7-f54335c4288e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.310892] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "974d06c1-2704-4a78-bbd7-f54335c4288e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.311206] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "974d06c1-2704-4a78-bbd7-f54335c4288e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.311464] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "974d06c1-2704-4a78-bbd7-f54335c4288e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.311707] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "974d06c1-2704-4a78-bbd7-f54335c4288e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.314790] env[65758]: INFO nova.compute.manager [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Terminating instance [ 1096.491328] env[65758]: WARNING neutronclient.v2_0.client [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a 
future release. [ 1096.492122] env[65758]: WARNING openstack [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1096.492811] env[65758]: WARNING openstack [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1096.668663] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661183, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.713914] env[65758]: WARNING neutronclient.v2_0.client [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1096.714791] env[65758]: WARNING openstack [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1096.715187] env[65758]: WARNING openstack [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1096.738146] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fc0285-d300-4c45-8883-0943ed33e78b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.749619] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a88af20-8138-49bf-9701-0c6505cbd707 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.795361] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4f06f8-5e43-4e13-899e-61aece2912b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.806813] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3a3fcf63-3a76-44b0-abb0-1903777eea73 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.825326] env[65758]: DEBUG nova.compute.manager [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1096.825597] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.826150] env[65758]: DEBUG nova.compute.provider_tree [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.828386] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec62e1ff-0919-4842-b198-a59944f7b88b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.840229] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.842764] env[65758]: DEBUG nova.network.neutron [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance_info_cache with network_info: [{"id": "05e0fa46-1b67-477a-bc40-26c9641f6549", "address": "fa:16:3e:6d:7e:f5", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05e0fa46-1b", "ovs_interfaceid": "05e0fa46-1b67-477a-bc40-26c9641f6549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1096.844345] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b008ab5f-913b-4034-a61c-76193bfb070b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.857854] env[65758]: DEBUG oslo_vmware.api [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1096.857854] env[65758]: value = "task-4661184" [ 1096.857854] env[65758]: _type = "Task" [ 1096.857854] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.870869] env[65758]: DEBUG oslo_vmware.api [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661184, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.163546] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661183, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.346202} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.163844] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2edcb03c-85ab-4d21-8ff4-b3b47fae6985/2edcb03c-85ab-4d21-8ff4-b3b47fae6985.vmdk to [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24/afc1eb16-c275-4b3b-a7fe-9938d2241e24.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1097.164677] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e783dec-f9db-465d-9c24-d154445a6404 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.187161] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24/afc1eb16-c275-4b3b-a7fe-9938d2241e24.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.187472] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f5e0384-14fc-461f-8c0a-85864b468e01 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.208096] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1097.208096] env[65758]: value = "task-4661185" [ 1097.208096] env[65758]: _type 
= "Task" [ 1097.208096] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.219135] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661185, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.334118] env[65758]: DEBUG nova.scheduler.client.report [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1097.347222] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.368795] env[65758]: DEBUG oslo_vmware.api [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661184, 'name': PowerOffVM_Task, 'duration_secs': 0.277713} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.369081] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.369245] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1097.369493] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63a8a883-bbea-4c2e-a204-dd963946ea1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.387519] env[65758]: DEBUG oslo_concurrency.lockutils [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.387775] env[65758]: DEBUG oslo_concurrency.lockutils [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.388027] env[65758]: INFO nova.compute.manager [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Attaching volume 521fcd20-a69b-4128-829d-bb436cf0d10d to /dev/sdc [ 1097.421024] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d0f46f-0e75-4802-b4ad-aff7d751c1ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.430872] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a997fa-4528-4418-9032-73b6b64ff260 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.435018] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1097.435321] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Deleting contents of the VM from datastore datastore2 {{(pid=65758) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1097.435474] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleting the datastore file [datastore2] 974d06c1-2704-4a78-bbd7-f54335c4288e {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.436175] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30f59635-3c63-484e-a557-1519c15e4560 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.443049] env[65758]: DEBUG oslo_vmware.api [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1097.443049] env[65758]: value = "task-4661187" [ 1097.443049] env[65758]: _type = "Task" [ 1097.443049] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.449804] env[65758]: DEBUG nova.virt.block_device [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updating existing volume attachment record: 52ea0878-9271-4b18-b531-0637ba2890a1 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1097.457615] env[65758]: DEBUG oslo_vmware.api [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661187, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.718894] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661185, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.839367] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.840088] env[65758]: DEBUG nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1097.843778] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.602s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.955199] env[65758]: DEBUG oslo_vmware.api [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22059} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.955514] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.955701] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.955874] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.956056] env[65758]: INFO nova.compute.manager [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1097.956327] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1097.956550] env[65758]: DEBUG nova.compute.manager [-] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1097.956697] env[65758]: DEBUG nova.network.neutron [-] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1097.956918] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
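
For context on the repeated "CopyVirtualDisk_Task ... progress is N%" entries above: nova's VMware driver hands long-running vCenter operations to oslo.vmware, whose session object polls the returned Task until it finishes, logging the reported progress at each poll. The sketch below is illustrative only, not code from this deployment; the vCenter host, credentials and datastore paths are placeholders.

    from oslo_vmware import api

    # Placeholder connection details; a real deployment reads these from nova.conf.
    session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Start the server-side disk copy; the call returns a Task managed object.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore1] image-cache/base.vmdk',
        destName='[datastore1] instance-uuid/instance-uuid.vmdk')

    # wait_for_task() polls the task every task_poll_interval seconds; each poll
    # corresponds to a "Task: {'id': ..., 'name': CopyVirtualDisk_Task} progress
    # is N%." debug line. It returns the task result on success, raises on error.
    session.wait_for_task(copy_task)
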
[ 1097.957477] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1097.957745] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1098.034459] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1098.219729] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661185, 'name': ReconfigVM_Task, 'duration_secs': 0.74197} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.219930] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Reconfigured VM instance instance-00000057 to attach disk [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24/afc1eb16-c275-4b3b-a7fe-9938d2241e24.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.221156] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_options': None, 'guest_format': None, 'encrypted': False, 'encryption_secret_uuid': None, 'device_type': 'disk', 'encryption_format': None, 'device_name': '/dev/sda', 'size': 0, 'disk_bus': None, 'boot_index': 0, 'image_id': '75a6399b-5100-4c51-b5cf-162bd505a28f'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910042', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'name': 'volume-584fc235-4162-403c-abe8-2188f52e0331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'afc1eb16-c275-4b3b-a7fe-9938d2241e24', 'attached_at': '', 'detached_at': '', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'serial': '584fc235-4162-403c-abe8-2188f52e0331'}, 'attachment_id': '964bf65d-b6cc-4088-9faa-62a2df588a59', 'disk_bus': None, 'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=65758) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1098.221409] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: 
afc1eb16-c275-4b3b-a7fe-9938d2241e24] Volume attach. Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1098.221655] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910042', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'name': 'volume-584fc235-4162-403c-abe8-2188f52e0331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'afc1eb16-c275-4b3b-a7fe-9938d2241e24', 'attached_at': '', 'detached_at': '', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'serial': '584fc235-4162-403c-abe8-2188f52e0331'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1098.222576] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5b8493-5b43-4a78-8262-e6f6be5a4ecd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.243402] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a206cf6-7cf2-4b28-8999-2b38a93b4ac1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.272747] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] volume-584fc235-4162-403c-abe8-2188f52e0331/volume-584fc235-4162-403c-abe8-2188f52e0331.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.274950] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdb2a602-1e6d-4dc1-97e5-971a9afed80f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.293105] env[65758]: DEBUG nova.compute.manager [req-5782db51-1e48-4f23-a68a-d50ecaeb3153 req-aad95105-9879-4a07-aa31-e75d6697f5d9 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Received event network-vif-deleted-bc11b657-640b-458c-9870-62fd7fdbe88a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1098.293335] env[65758]: INFO nova.compute.manager [req-5782db51-1e48-4f23-a68a-d50ecaeb3153 req-aad95105-9879-4a07-aa31-e75d6697f5d9 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Neutron deleted interface bc11b657-640b-458c-9870-62fd7fdbe88a; detaching it from the instance and deleting it from the info cache [ 1098.293510] env[65758]: DEBUG nova.network.neutron [req-5782db51-1e48-4f23-a68a-d50ecaeb3153 req-aad95105-9879-4a07-aa31-e75d6697f5d9 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1098.300538] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c 
tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1098.300538] env[65758]: value = "task-4661189" [ 1098.300538] env[65758]: _type = "Task" [ 1098.300538] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.311285] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661189, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.347761] env[65758]: DEBUG nova.compute.utils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1098.353048] env[65758]: DEBUG nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1098.353349] env[65758]: DEBUG nova.network.neutron [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1098.353831] env[65758]: WARNING neutronclient.v2_0.client [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1098.354421] env[65758]: WARNING neutronclient.v2_0.client [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
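
The inventory payloads logged for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 follow Placement's per-resource-class schema, and the capacity the scheduler can actually allocate is (total - reserved) * allocation_ratio: 192 VCPU, 196078 MB of RAM and 200 GB of disk for the figures above. The snippet below simply reproduces that arithmetic over the same data; it is not Nova code.

    # Inventory exactly as reported in the log for this provider.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                      'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'min_unit': 1, 'max_unit': 95,
                      'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Allocatable capacity in Placement: (total - reserved) * allocation_ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %d allocatable (max %d per allocation)' % (rc, capacity, inv['max_unit']))
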
[ 1098.355136] env[65758]: WARNING openstack [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1098.355381] env[65758]: WARNING openstack [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1098.415917] env[65758]: DEBUG nova.policy [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1098.578995] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19882482-4500-4289-aa87-5377e5c0fee7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.587985] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5c879a-f011-49b8-8516-bbfd474aae52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.621342] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60011587-8aae-4e37-aacc-607bc13550af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.630668] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fbf53c-a3ee-4229-8dba-cde605fc942a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.646145] env[65758]: DEBUG nova.compute.provider_tree [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.740355] env[65758]: DEBUG nova.network.neutron [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Successfully created port: 4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:579}} [ 1098.791998] env[65758]: DEBUG nova.network.neutron [-] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1098.796538] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e079a77-a942-4969-8d33-71e4518615d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.810318] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a5d9ee-63ba-4489-a980-f89c8f59afa1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.826156] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661189, 'name': ReconfigVM_Task, 'duration_secs': 0.497851} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.826904] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Reconfigured VM instance instance-00000057 to attach disk [datastore1] volume-584fc235-4162-403c-abe8-2188f52e0331/volume-584fc235-4162-403c-abe8-2188f52e0331.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.832768] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eda41c83-c96e-40fc-b86f-f087985f44c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.860388] env[65758]: DEBUG nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1098.864246] env[65758]: DEBUG nova.compute.manager [req-5782db51-1e48-4f23-a68a-d50ecaeb3153 req-aad95105-9879-4a07-aa31-e75d6697f5d9 service nova] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Detach interface failed, port_id=bc11b657-640b-458c-9870-62fd7fdbe88a, reason: Instance 974d06c1-2704-4a78-bbd7-f54335c4288e could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1098.873244] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e079a5dd-b66e-4731-86e7-61529d4ee719 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.876578] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1098.876578] env[65758]: value = "task-4661190" [ 1098.876578] env[65758]: _type = "Task" [ 1098.876578] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.897123] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance '149655f8-fcf5-4cfe-ab96-1171b9d3b550' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1098.908774] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661190, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.149062] env[65758]: DEBUG nova.scheduler.client.report [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.296401] env[65758]: INFO nova.compute.manager [-] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Took 1.34 seconds to deallocate network for instance. [ 1099.389892] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661190, 'name': ReconfigVM_Task, 'duration_secs': 0.235022} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.390238] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910042', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'name': 'volume-584fc235-4162-403c-abe8-2188f52e0331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'afc1eb16-c275-4b3b-a7fe-9938d2241e24', 'attached_at': '', 'detached_at': '', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'serial': '584fc235-4162-403c-abe8-2188f52e0331'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1099.390903] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e665fda-504f-4e1d-a425-b80649894815 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.400153] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1099.400153] env[65758]: value = "task-4661191" [ 1099.400153] env[65758]: _type = "Task" [ 1099.400153] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.407116] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1099.408509] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb161de1-f156-4d17-b5d5-e6335bc0c167 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.413877] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661191, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.420730] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1099.420730] env[65758]: value = "task-4661192" [ 1099.420730] env[65758]: _type = "Task" [ 1099.420730] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.429871] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661192, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.803101] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.809944] env[65758]: DEBUG oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526f1146-83f7-5fa2-d70b-078ae8121b53/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1099.811035] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088f3058-ae2f-4a2c-b475-1cad35ae7918 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.821033] env[65758]: DEBUG oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526f1146-83f7-5fa2-d70b-078ae8121b53/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1099.821033] env[65758]: ERROR oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526f1146-83f7-5fa2-d70b-078ae8121b53/disk-0.vmdk due to incomplete transfer. [ 1099.821198] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3d9fc86c-0fbf-45f2-8f23-6b14d0c8984d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.830225] env[65758]: DEBUG oslo_vmware.rw_handles [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526f1146-83f7-5fa2-d70b-078ae8121b53/disk-0.vmdk. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1099.830457] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Uploaded image b87c38de-e73b-49dc-a7dd-1e776ee516c5 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1099.833353] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1099.833701] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-83545605-b55d-414d-b1ad-4041b0ed23e1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.841479] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1099.841479] env[65758]: value = "task-4661194" [ 1099.841479] env[65758]: _type = "Task" [ 1099.841479] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.852459] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661194, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.872172] env[65758]: DEBUG nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1099.899454] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1099.899761] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1099.899962] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1099.900198] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1099.900365] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1099.900544] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1099.900786] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1099.900984] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1099.901224] 
env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1099.901418] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1099.901617] env[65758]: DEBUG nova.virt.hardware [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1099.902671] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7787c0-b879-41bb-a867-e221277ac4ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.917893] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7c85c8-34ec-42a4-a541-a8ca77174469 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.922039] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661191, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.938950] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661192, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.159685] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.316s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.159939] env[65758]: DEBUG nova.compute.manager [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=65758) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5416}} [ 1100.163117] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.600s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.164802] env[65758]: INFO nova.compute.claims [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.321162] env[65758]: DEBUG nova.compute.manager [req-a8ea28b7-1efb-43c9-b8de-54fa22a8bfd0 req-18c628a7-52bc-4bf8-af85-b204c06d0d40 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-vif-plugged-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1100.321595] env[65758]: DEBUG oslo_concurrency.lockutils [req-a8ea28b7-1efb-43c9-b8de-54fa22a8bfd0 req-18c628a7-52bc-4bf8-af85-b204c06d0d40 service nova] Acquiring lock "62ae50af-ff52-4084-8161-1a650eff5247-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.321892] env[65758]: DEBUG oslo_concurrency.lockutils [req-a8ea28b7-1efb-43c9-b8de-54fa22a8bfd0 req-18c628a7-52bc-4bf8-af85-b204c06d0d40 service nova] Lock "62ae50af-ff52-4084-8161-1a650eff5247-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.322130] env[65758]: DEBUG oslo_concurrency.lockutils [req-a8ea28b7-1efb-43c9-b8de-54fa22a8bfd0 req-18c628a7-52bc-4bf8-af85-b204c06d0d40 service nova] Lock "62ae50af-ff52-4084-8161-1a650eff5247-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.322412] env[65758]: DEBUG nova.compute.manager [req-a8ea28b7-1efb-43c9-b8de-54fa22a8bfd0 req-18c628a7-52bc-4bf8-af85-b204c06d0d40 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] No waiting events found dispatching network-vif-plugged-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1100.322491] env[65758]: WARNING nova.compute.manager [req-a8ea28b7-1efb-43c9-b8de-54fa22a8bfd0 req-18c628a7-52bc-4bf8-af85-b204c06d0d40 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received unexpected event network-vif-plugged-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 for instance with vm_state building and task_state spawning. 
[ 1100.326119] env[65758]: DEBUG nova.network.neutron [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Successfully updated port: 4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1100.355989] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661194, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.412291] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661191, 'name': Rename_Task, 'duration_secs': 0.944679} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.412590] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1100.412840] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73afb218-af1f-4b0d-b776-efe994e0fb97 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.421352] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1100.421352] env[65758]: value = "task-4661195" [ 1100.421352] env[65758]: _type = "Task" [ 1100.421352] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.432242] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661192, 'name': PowerOffVM_Task, 'duration_secs': 0.912723} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.435368] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1100.435561] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance '149655f8-fcf5-4cfe-ab96-1171b9d3b550' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1100.438913] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661195, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.730580] env[65758]: INFO nova.scheduler.client.report [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted allocation for migration c2b17f22-6bec-4cfa-bbde-36c745a9c6b9 [ 1100.829326] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.829326] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.829428] env[65758]: DEBUG nova.network.neutron [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1100.854545] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661194, 'name': Destroy_Task, 'duration_secs': 0.845546} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.854830] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Destroyed the VM [ 1100.855097] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1100.855409] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4de79dd9-0d54-45c2-9e83-9fbde8581d8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.863755] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1100.863755] env[65758]: value = "task-4661196" [ 1100.863755] env[65758]: _type = "Task" [ 1100.863755] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.873704] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661196, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.933430] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661195, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.941813] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1100.942107] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1100.942290] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1100.942476] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1100.942658] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1100.942824] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1100.943052] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1100.943219] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1100.943403] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies 
{{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1100.943552] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1100.943751] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1100.950265] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15ffb993-6075-40ce-994c-86374854c425 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.968826] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1100.968826] env[65758]: value = "task-4661197" [ 1100.968826] env[65758]: _type = "Task" [ 1100.968826] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.980259] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661197, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.237449] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73bbf12d-62c6-45f1-b313-586d748bae51 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.045s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.332382] env[65758]: WARNING openstack [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1101.332923] env[65758]: WARNING openstack [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1101.374149] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661196, 'name': RemoveSnapshot_Task} progress is 56%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.379580] env[65758]: DEBUG nova.network.neutron [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1101.399293] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d80e272-2107-41f1-a72a-a2adef8d77c3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.409323] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b202df86-ae0f-42ae-a71e-fd4df305cc52 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.448202] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3826009a-d8fa-48c7-801f-a3f34c2c8402 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.459165] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002f6f74-eb7e-4f53-b30e-f5e239a8b78a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.462883] env[65758]: DEBUG oslo_vmware.api [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661195, 'name': PowerOnVM_Task, 'duration_secs': 0.563741} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.463156] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1101.475932] env[65758]: DEBUG nova.compute.provider_tree [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.482041] env[65758]: WARNING neutronclient.v2_0.client [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1101.482721] env[65758]: WARNING openstack [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1101.483116] env[65758]: WARNING openstack [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1101.495392] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661197, 'name': ReconfigVM_Task, 'duration_secs': 0.152839} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.495392] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance '149655f8-fcf5-4cfe-ab96-1171b9d3b550' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1101.575788] env[65758]: DEBUG nova.compute.manager [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1101.576974] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f588b5fc-9af0-42b3-be12-57c07553d30c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.583340] env[65758]: DEBUG nova.network.neutron [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": 
"nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1101.749721] env[65758]: DEBUG nova.objects.instance [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'flavor' on Instance uuid 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.875313] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661196, 'name': RemoveSnapshot_Task} progress is 56%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.981678] env[65758]: DEBUG nova.scheduler.client.report [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.997607] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1101.997847] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910051', 'volume_id': '521fcd20-a69b-4128-829d-bb436cf0d10d', 'name': 'volume-521fcd20-a69b-4128-829d-bb436cf0d10d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ade1d760-e3e7-49c8-ba9d-b4829ca60841', 'attached_at': '', 'detached_at': '', 'volume_id': '521fcd20-a69b-4128-829d-bb436cf0d10d', 'serial': '521fcd20-a69b-4128-829d-bb436cf0d10d'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1101.998992] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59779a2c-2845-47c7-9f61-9b3f8997c694 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.003881] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1102.004253] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1102.004475] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1102.004751] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1102.004930] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1102.005101] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1102.005331] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1102.005491] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1102.005656] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1102.005814] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1102.005982] env[65758]: DEBUG nova.virt.hardware [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1102.011225] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1102.011800] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ef13203-9bd3-40e5-be4c-1a2fed6265c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.042478] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31786314-05c0-4bf3-a816-13793b574198 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.045310] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1102.045310] env[65758]: value = "task-4661198" [ 1102.045310] env[65758]: _type = "Task" [ 1102.045310] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.074406] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] volume-521fcd20-a69b-4128-829d-bb436cf0d10d/volume-521fcd20-a69b-4128-829d-bb436cf0d10d.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.075581] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42ea5d26-2617-47b0-bca7-045a256225fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.092148] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.092563] env[65758]: DEBUG nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Instance network_info: |[{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1102.092902] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.096909] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:39:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b156aab-9aa2-46c6-8e9f-b9912654dcc0', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1102.104134] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1102.106682] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1102.107149] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb4b46f-d348-4b86-b624-7a59ffb3299c tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 33.176s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.107954] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2d8fa93-423c-4abc-88f2-61e0fc361458 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.125886] env[65758]: DEBUG oslo_vmware.api [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1102.125886] env[65758]: value = "task-4661199" [ 1102.125886] env[65758]: _type = "Task" [ 1102.125886] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.132177] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1102.132177] env[65758]: value = "task-4661200" [ 1102.132177] env[65758]: _type = "Task" [ 1102.132177] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.139353] env[65758]: DEBUG oslo_vmware.api [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.145029] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661200, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.255110] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.255312] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.255513] env[65758]: DEBUG nova.network.neutron [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1102.255706] env[65758]: DEBUG nova.objects.instance [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'info_cache' on Instance uuid 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.359899] env[65758]: DEBUG nova.compute.manager [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1102.359899] env[65758]: DEBUG nova.compute.manager [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing instance network info cache due to event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1102.359899] env[65758]: DEBUG oslo_concurrency.lockutils [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.360438] env[65758]: DEBUG oslo_concurrency.lockutils [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.360438] env[65758]: DEBUG nova.network.neutron [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1102.374259] env[65758]: DEBUG oslo_vmware.api [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661196, 'name': RemoveSnapshot_Task, 'duration_secs': 1.233818} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.374597] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1102.374888] env[65758]: INFO nova.compute.manager [None req-33469ad8-9325-4e51-ad5e-e63f51e399d0 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Took 18.22 seconds to snapshot the instance on the hypervisor. [ 1102.487977] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.488749] env[65758]: DEBUG nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1102.492204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.464s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.492518] env[65758]: DEBUG nova.objects.instance [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lazy-loading 'resources' on Instance uuid 63b744d2-541a-42e3-9717-b06a4459fd50 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.556468] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661198, 'name': ReconfigVM_Task, 'duration_secs': 0.266115} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.556839] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1102.557642] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8268da4a-93b8-42af-9e24-6040be7cf0b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.582312] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1/volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.582618] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-449b0978-e693-422f-9f23-640a18acb891 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.601509] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1102.601509] env[65758]: value = "task-4661201" [ 1102.601509] env[65758]: _type = "Task" [ 1102.601509] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.610861] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661201, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.636472] env[65758]: DEBUG oslo_vmware.api [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661199, 'name': ReconfigVM_Task, 'duration_secs': 0.439031} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.639894] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfigured VM instance instance-0000005f to attach disk [datastore1] volume-521fcd20-a69b-4128-829d-bb436cf0d10d/volume-521fcd20-a69b-4128-829d-bb436cf0d10d.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.644551] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86bcc722-cdb5-4130-b954-ec39191d41ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.661288] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661200, 'name': CreateVM_Task, 'duration_secs': 0.496222} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.662518] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1102.662856] env[65758]: DEBUG oslo_vmware.api [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1102.662856] env[65758]: value = "task-4661202" [ 1102.662856] env[65758]: _type = "Task" [ 1102.662856] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.663325] env[65758]: WARNING neutronclient.v2_0.client [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1102.663668] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.663813] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.664135] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1102.664471] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5361b4b8-f971-4833-9141-729d58116f3c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.672675] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1102.672675] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5207b72d-bb42-c23e-0548-3ef410acd7b1" [ 1102.672675] env[65758]: _type = "Task" [ 1102.672675] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.677600] env[65758]: DEBUG oslo_vmware.api [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661202, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.688559] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5207b72d-bb42-c23e-0548-3ef410acd7b1, 'name': SearchDatastore_Task, 'duration_secs': 0.012939} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.688888] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.689160] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1102.689419] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.689568] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.689763] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.690058] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cab9b65-a374-49e9-9b1d-30e090bbd482 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.698993] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.699196] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.699957] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c53128a-4a09-4523-94c6-b55543122143 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.706140] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1102.706140] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e9a116-8aff-1db1-fe93-7b685ac9adb4" [ 1102.706140] env[65758]: _type = "Task" [ 1102.706140] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.714796] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e9a116-8aff-1db1-fe93-7b685ac9adb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.759321] env[65758]: DEBUG nova.objects.base [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Object Instance<76ec31e6-65c2-4290-9ec0-b274be95baa4> lazy-loaded attributes: flavor,info_cache {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1102.863609] env[65758]: WARNING neutronclient.v2_0.client [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1102.864458] env[65758]: WARNING openstack [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1102.864861] env[65758]: WARNING openstack [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1102.997232] env[65758]: DEBUG nova.compute.utils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1103.001531] env[65758]: DEBUG nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1103.001638] env[65758]: DEBUG nova.network.neutron [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1103.002427] env[65758]: WARNING neutronclient.v2_0.client [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1103.002427] env[65758]: WARNING neutronclient.v2_0.client [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1103.002954] env[65758]: WARNING openstack [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1103.003658] env[65758]: WARNING openstack [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1103.043683] env[65758]: WARNING neutronclient.v2_0.client [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1103.044660] env[65758]: WARNING openstack [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1103.046062] env[65758]: WARNING openstack [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1103.057543] env[65758]: DEBUG nova.policy [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd07b5ba2c3ef430293fbf39148961763', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bad3e3c7054c424a800cb12e9c5dbb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1103.116457] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661201, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.177248] env[65758]: DEBUG oslo_vmware.api [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661202, 'name': ReconfigVM_Task, 'duration_secs': 0.150205} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.177496] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910051', 'volume_id': '521fcd20-a69b-4128-829d-bb436cf0d10d', 'name': 'volume-521fcd20-a69b-4128-829d-bb436cf0d10d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ade1d760-e3e7-49c8-ba9d-b4829ca60841', 'attached_at': '', 'detached_at': '', 'volume_id': '521fcd20-a69b-4128-829d-bb436cf0d10d', 'serial': '521fcd20-a69b-4128-829d-bb436cf0d10d'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1103.193076] env[65758]: DEBUG nova.network.neutron [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updated VIF entry in instance network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1103.193539] env[65758]: DEBUG nova.network.neutron [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1103.223047] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e9a116-8aff-1db1-fe93-7b685ac9adb4, 'name': SearchDatastore_Task, 'duration_secs': 0.009628} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.224284] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0db8cfa5-0792-49e0-994e-f376573aaedf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.235432] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1103.235432] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d46e55-b5ac-f372-909b-5cb53d081ad7" [ 1103.235432] env[65758]: _type = "Task" [ 1103.235432] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.237303] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19928b3c-fd54-4de0-a4b9-ab5bfb77aff3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.251974] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d46e55-b5ac-f372-909b-5cb53d081ad7, 'name': SearchDatastore_Task, 'duration_secs': 0.011034} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.254083] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.254409] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 62ae50af-ff52-4084-8161-1a650eff5247/62ae50af-ff52-4084-8161-1a650eff5247.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1103.254726] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a279bb27-49ad-4b86-ad1a-0eb574bd0714 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.257336] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6e8c84-1c0b-4e95-acdf-0c44721256eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.262203] env[65758]: WARNING neutronclient.v2_0.client [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1103.262853] env[65758]: WARNING openstack [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1103.263326] env[65758]: WARNING openstack [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1103.306743] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1139cf32-ddd6-4891-a3fe-755235ee8528 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.309685] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1103.309685] env[65758]: value = "task-4661203" [ 1103.309685] env[65758]: _type = "Task" [ 1103.309685] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.320055] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da28b6f-5cdc-4711-a940-b86fc616e417 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.327887] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661203, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.339500] env[65758]: DEBUG nova.compute.provider_tree [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.379659] env[65758]: DEBUG nova.network.neutron [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Successfully created port: eca37d97-ed85-4bcf-b389-e161b7507b5b {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1103.460383] env[65758]: WARNING neutronclient.v2_0.client [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1103.461105] env[65758]: WARNING openstack [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1103.461460] env[65758]: WARNING openstack [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1103.505171] env[65758]: DEBUG nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1103.577670] env[65758]: DEBUG nova.network.neutron [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [{"id": "2e41907c-1553-48df-9644-cb422d2f19df", "address": "fa:16:3e:b2:e3:b9", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e41907c-15", "ovs_interfaceid": "2e41907c-1553-48df-9644-cb422d2f19df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1103.616857] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661201, 'name': ReconfigVM_Task, 'duration_secs': 0.64513} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.617912] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Reconfigured VM instance instance-00000064 to attach disk [datastore2] volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1/volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.618224] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance '149655f8-fcf5-4cfe-ab96-1171b9d3b550' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1103.696308] env[65758]: DEBUG oslo_concurrency.lockutils [req-9df530cb-55aa-4f1d-ab56-26de21f1f5db req-d6d85197-2c2d-4261-9d57-57c9bd717606 service nova] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.821664] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661203, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.842882] env[65758]: DEBUG nova.scheduler.client.report [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.957630] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquiring lock "a014debf-2f16-4b30-af78-27a6751060de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.957972] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "a014debf-2f16-4b30-af78-27a6751060de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.958334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 
tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquiring lock "a014debf-2f16-4b30-af78-27a6751060de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.958636] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "a014debf-2f16-4b30-af78-27a6751060de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.958865] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "a014debf-2f16-4b30-af78-27a6751060de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.961519] env[65758]: INFO nova.compute.manager [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Terminating instance [ 1104.082110] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-76ec31e6-65c2-4290-9ec0-b274be95baa4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.124934] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869c6999-8716-4ac3-9ea4-8940ad9622ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.146762] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbbfa28-678d-4c8f-82f4-e54cf28d8fd6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.165856] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance '149655f8-fcf5-4cfe-ab96-1171b9d3b550' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1104.228573] env[65758]: DEBUG nova.objects.instance [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid ade1d760-e3e7-49c8-ba9d-b4829ca60841 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.323617] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661203, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.348970] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.351596] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.548s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.352174] env[65758]: DEBUG nova.objects.instance [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lazy-loading 'resources' on Instance uuid 974d06c1-2704-4a78-bbd7-f54335c4288e {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.374176] env[65758]: INFO nova.scheduler.client.report [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Deleted allocations for instance 63b744d2-541a-42e3-9717-b06a4459fd50 [ 1104.467080] env[65758]: DEBUG nova.compute.manager [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1104.467080] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1104.467700] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5dbe1c-81c5-4d21-930c-579fba245398 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.478397] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.478667] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31fabb68-9d16-41fa-978a-139f56f6e1a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.485767] env[65758]: DEBUG oslo_vmware.api [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1104.485767] env[65758]: value = "task-4661204" [ 1104.485767] env[65758]: _type = "Task" [ 1104.485767] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.494883] env[65758]: DEBUG oslo_vmware.api [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.516316] env[65758]: DEBUG nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1104.543751] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1104.544011] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1104.544174] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1104.544382] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1104.544529] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1104.544671] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1104.544871] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1104.545034] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1104.545325] env[65758]: DEBUG nova.virt.hardware [None 
req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1104.545421] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1104.545502] env[65758]: DEBUG nova.virt.hardware [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1104.546699] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7751a891-f7dc-47b8-be14-8915890e13cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.555709] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254ad781-f275-49b7-b549-daef45e09834 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.734353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-005ca06d-1176-4b02-a3d3-af4a42766cd8 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.346s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.823568] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661203, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.502104} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.823844] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 62ae50af-ff52-4084-8161-1a650eff5247/62ae50af-ff52-4084-8161-1a650eff5247.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1104.824118] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1104.824380] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb5caf59-317e-4ad7-bf17-c5d3d010ee3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.832956] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1104.832956] env[65758]: value = "task-4661205" [ 1104.832956] env[65758]: _type = "Task" [ 1104.832956] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.843899] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661205, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.885319] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6719ba08-8474-4017-9547-ac485bad18da tempest-ServersNegativeTestJSON-165210990 tempest-ServersNegativeTestJSON-165210990-project-member] Lock "63b744d2-541a-42e3-9717-b06a4459fd50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.281s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.897956] env[65758]: DEBUG nova.compute.manager [req-08218721-50f5-4b34-92e5-f74578eb2097 req-d5dfbed6-3737-4e10-ba85-be6453c4fa69 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Received event network-vif-plugged-eca37d97-ed85-4bcf-b389-e161b7507b5b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1104.898344] env[65758]: DEBUG oslo_concurrency.lockutils [req-08218721-50f5-4b34-92e5-f74578eb2097 req-d5dfbed6-3737-4e10-ba85-be6453c4fa69 service nova] Acquiring lock "8f7c865d-1207-4300-b721-25b196f7a2f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.898422] env[65758]: DEBUG oslo_concurrency.lockutils [req-08218721-50f5-4b34-92e5-f74578eb2097 req-d5dfbed6-3737-4e10-ba85-be6453c4fa69 service nova] Lock "8f7c865d-1207-4300-b721-25b196f7a2f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.899028] env[65758]: DEBUG oslo_concurrency.lockutils [req-08218721-50f5-4b34-92e5-f74578eb2097 req-d5dfbed6-3737-4e10-ba85-be6453c4fa69 service nova] Lock "8f7c865d-1207-4300-b721-25b196f7a2f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.899028] env[65758]: DEBUG nova.compute.manager [req-08218721-50f5-4b34-92e5-f74578eb2097 req-d5dfbed6-3737-4e10-ba85-be6453c4fa69 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] No waiting events found dispatching network-vif-plugged-eca37d97-ed85-4bcf-b389-e161b7507b5b {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1104.899155] env[65758]: WARNING nova.compute.manager [req-08218721-50f5-4b34-92e5-f74578eb2097 req-d5dfbed6-3737-4e10-ba85-be6453c4fa69 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Received unexpected event network-vif-plugged-eca37d97-ed85-4bcf-b389-e161b7507b5b for instance with vm_state building and task_state spawning. [ 1104.940234] env[65758]: DEBUG nova.network.neutron [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Successfully updated port: eca37d97-ed85-4bcf-b389-e161b7507b5b {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1104.998594] env[65758]: DEBUG oslo_vmware.api [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661204, 'name': PowerOffVM_Task, 'duration_secs': 0.424968} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.998932] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.999112] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1104.999382] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d14980b6-74d8-424c-84fa-06e37442fc28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.041075] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.041498] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.085248] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da5e449-4128-4a75-ac06-e7fc9ca1616b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.088193] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.088463] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa547806-40b8-46a9-812b-98f34e164eb3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.096388] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7efa26-d3bb-48b2-914e-ac3ee4f52962 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.101090] env[65758]: DEBUG oslo_vmware.api [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1105.101090] env[65758]: value = "task-4661207" [ 1105.101090] env[65758]: _type = "Task" [ 1105.101090] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.134341] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.134604] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.134789] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Deleting the datastore file [datastore1] a014debf-2f16-4b30-af78-27a6751060de {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.136069] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c102e6b4-71df-44a2-8a22-eef1738ad20d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.139063] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d406402-073b-4165-9f39-899dacf6fa2a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.144478] env[65758]: DEBUG oslo_vmware.api [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661207, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.151956] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9682d338-72bf-4294-9f9d-5b77c66f2bb9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.156601] env[65758]: DEBUG oslo_vmware.api [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for the task: (returnval){ [ 1105.156601] env[65758]: value = "task-4661208" [ 1105.156601] env[65758]: _type = "Task" [ 1105.156601] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.169217] env[65758]: DEBUG nova.compute.provider_tree [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.176779] env[65758]: DEBUG oslo_vmware.api [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661208, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.343694] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661205, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079625} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.343874] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1105.344674] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2405cb-a79a-4ab1-9da4-b862271df709 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.368996] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 62ae50af-ff52-4084-8161-1a650eff5247/62ae50af-ff52-4084-8161-1a650eff5247.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.369309] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acf1d2d0-445e-4061-acf0-addbb28c2992 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.390518] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1105.390518] env[65758]: value = "task-4661209" [ 1105.390518] env[65758]: _type = "Task" [ 1105.390518] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.399292] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661209, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.444062] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-8f7c865d-1207-4300-b721-25b196f7a2f9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.444062] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-8f7c865d-1207-4300-b721-25b196f7a2f9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.444062] env[65758]: DEBUG nova.network.neutron [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1105.545120] env[65758]: INFO nova.compute.manager [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Detaching volume da1ad087-09d2-4369-aa55-0371cd8a59fe [ 1105.585455] env[65758]: INFO nova.virt.block_device [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Attempting to driver detach volume da1ad087-09d2-4369-aa55-0371cd8a59fe from mountpoint /dev/sdb [ 1105.585713] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1105.585895] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910049', 'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe', 'name': 'volume-da1ad087-09d2-4369-aa55-0371cd8a59fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ade1d760-e3e7-49c8-ba9d-b4829ca60841', 'attached_at': '', 'detached_at': '', 'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe', 'serial': 'da1ad087-09d2-4369-aa55-0371cd8a59fe'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1105.586814] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd516f7-50fb-4374-85db-c227c2162c8a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.617236] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687e48d4-26de-4e58-ab31-88ed9f88ac34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.625480] env[65758]: DEBUG oslo_vmware.api [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661207, 'name': PowerOnVM_Task, 'duration_secs': 0.4642} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.627198] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.627443] env[65758]: DEBUG nova.compute.manager [None req-6d76c5a7-8788-451f-a607-42e40c6df2b0 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1105.628356] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c481c1-6d83-4eb4-86dd-4f79198f6dc6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.631096] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0efa29-6e63-4bb2-91b4-7df9bc99389d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.664772] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c9d386-2547-4179-8eff-6ecd5835a227 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.674109] env[65758]: DEBUG nova.scheduler.client.report [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.677915] env[65758]: DEBUG oslo_vmware.api [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Task: {'id': task-4661208, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284134} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.693750] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.694138] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.694138] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.694335] env[65758]: INFO nova.compute.manager [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [instance: a014debf-2f16-4b30-af78-27a6751060de] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1105.694701] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1105.694969] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The volume has not been displaced from its original location: [datastore1] volume-da1ad087-09d2-4369-aa55-0371cd8a59fe/volume-da1ad087-09d2-4369-aa55-0371cd8a59fe.vmdk. No consolidation needed. {{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1105.701062] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1105.701917] env[65758]: DEBUG nova.compute.manager [-] [instance: a014debf-2f16-4b30-af78-27a6751060de] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1105.702073] env[65758]: DEBUG nova.network.neutron [-] [instance: a014debf-2f16-4b30-af78-27a6751060de] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1105.702343] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1105.702961] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1105.703258] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1105.710569] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97f8c9c5-0ab0-4239-82ca-f8f9ca856555 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.733808] env[65758]: DEBUG oslo_vmware.api [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1105.733808] env[65758]: value = "task-4661210" [ 1105.733808] env[65758]: _type = "Task" [ 1105.733808] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.743760] env[65758]: DEBUG oslo_vmware.api [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661210, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.759349] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1105.778348] env[65758]: WARNING neutronclient.v2_0.client [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1105.778743] env[65758]: WARNING neutronclient.v2_0.client [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1105.902901] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661209, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.948310] env[65758]: WARNING openstack [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1105.949180] env[65758]: WARNING openstack [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1106.012699] env[65758]: DEBUG nova.network.neutron [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Port 05e0fa46-1b67-477a-bc40-26c9641f6549 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 1106.092826] env[65758]: DEBUG nova.network.neutron [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1106.180612] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.207353] env[65758]: INFO nova.scheduler.client.report [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleted allocations for instance 974d06c1-2704-4a78-bbd7-f54335c4288e [ 1106.246552] env[65758]: DEBUG oslo_vmware.api [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661210, 'name': ReconfigVM_Task, 'duration_secs': 0.297187} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.247107] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1106.253042] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a5c7020-c814-48b5-b98b-649b08a87361 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.272744] env[65758]: DEBUG oslo_vmware.api [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1106.272744] env[65758]: value = "task-4661211" [ 1106.272744] env[65758]: _type = "Task" [ 1106.272744] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.282632] env[65758]: DEBUG oslo_vmware.api [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661211, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.301837] env[65758]: WARNING neutronclient.v2_0.client [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1106.301837] env[65758]: WARNING openstack [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1106.301837] env[65758]: WARNING openstack [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1106.408396] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661209, 'name': ReconfigVM_Task, 'duration_secs': 0.641134} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.410052] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 62ae50af-ff52-4084-8161-1a650eff5247/62ae50af-ff52-4084-8161-1a650eff5247.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.410969] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-642080f3-41f0-449b-9188-7cdad4af860d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.420703] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1106.420703] env[65758]: value = "task-4661212" [ 1106.420703] env[65758]: _type = "Task" [ 1106.420703] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.429816] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661212, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.444510] env[65758]: DEBUG nova.network.neutron [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Updating instance_info_cache with network_info: [{"id": "eca37d97-ed85-4bcf-b389-e161b7507b5b", "address": "fa:16:3e:4d:ce:8b", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeca37d97-ed", "ovs_interfaceid": "eca37d97-ed85-4bcf-b389-e161b7507b5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1106.480607] env[65758]: DEBUG nova.network.neutron [-] [instance: a014debf-2f16-4b30-af78-27a6751060de] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 
1106.719194] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e23f5ab5-0462-4416-8cde-32dd5670acff tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "974d06c1-2704-4a78-bbd7-f54335c4288e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.408s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.785032] env[65758]: DEBUG oslo_vmware.api [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661211, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.901408] env[65758]: DEBUG oslo_concurrency.lockutils [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.901677] env[65758]: DEBUG oslo_concurrency.lockutils [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.901899] env[65758]: DEBUG oslo_concurrency.lockutils [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.902085] env[65758]: DEBUG oslo_concurrency.lockutils [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.902519] env[65758]: DEBUG oslo_concurrency.lockutils [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.904678] env[65758]: INFO nova.compute.manager [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Terminating instance [ 1106.932320] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc 
tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661212, 'name': Rename_Task, 'duration_secs': 0.258466} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.932620] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.932880] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfdd27ad-a879-47fe-9af9-fe82822cfb16 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.937121] env[65758]: DEBUG nova.compute.manager [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Received event network-changed-eca37d97-ed85-4bcf-b389-e161b7507b5b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1106.937307] env[65758]: DEBUG nova.compute.manager [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Refreshing instance network info cache due to event network-changed-eca37d97-ed85-4bcf-b389-e161b7507b5b. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1106.937514] env[65758]: DEBUG oslo_concurrency.lockutils [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] Acquiring lock "refresh_cache-8f7c865d-1207-4300-b721-25b196f7a2f9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.944586] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1106.944586] env[65758]: value = "task-4661213" [ 1106.944586] env[65758]: _type = "Task" [ 1106.944586] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.948510] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-8f7c865d-1207-4300-b721-25b196f7a2f9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.948825] env[65758]: DEBUG nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Instance network_info: |[{"id": "eca37d97-ed85-4bcf-b389-e161b7507b5b", "address": "fa:16:3e:4d:ce:8b", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeca37d97-ed", "ovs_interfaceid": "eca37d97-ed85-4bcf-b389-e161b7507b5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1106.949174] env[65758]: DEBUG oslo_concurrency.lockutils [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] Acquired lock "refresh_cache-8f7c865d-1207-4300-b721-25b196f7a2f9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.949349] env[65758]: DEBUG nova.network.neutron [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Refreshing network info cache for port eca37d97-ed85-4bcf-b389-e161b7507b5b {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1106.950647] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:ce:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eca37d97-ed85-4bcf-b389-e161b7507b5b', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1106.958154] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 
tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1106.959738] env[65758]: WARNING neutronclient.v2_0.client [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1106.960412] env[65758]: WARNING openstack [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1106.960775] env[65758]: WARNING openstack [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1106.968438] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1106.972654] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79479e9a-5450-48c5-ac3c-8172f1b8dd15 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.989464] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661213, 'name': PowerOnVM_Task} progress is 33%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.989951] env[65758]: INFO nova.compute.manager [-] [instance: a014debf-2f16-4b30-af78-27a6751060de] Took 1.29 seconds to deallocate network for instance. [ 1106.998576] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1106.998576] env[65758]: value = "task-4661214" [ 1106.998576] env[65758]: _type = "Task" [ 1106.998576] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.010749] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661214, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.034332] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.034660] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.034951] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.157878] env[65758]: WARNING neutronclient.v2_0.client [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1107.158688] env[65758]: WARNING openstack [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1107.159114] env[65758]: WARNING openstack [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1107.255885] env[65758]: DEBUG nova.network.neutron [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Updated VIF entry in instance network info cache for port eca37d97-ed85-4bcf-b389-e161b7507b5b. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1107.256271] env[65758]: DEBUG nova.network.neutron [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Updating instance_info_cache with network_info: [{"id": "eca37d97-ed85-4bcf-b389-e161b7507b5b", "address": "fa:16:3e:4d:ce:8b", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeca37d97-ed", "ovs_interfaceid": "eca37d97-ed85-4bcf-b389-e161b7507b5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1107.284925] env[65758]: DEBUG oslo_vmware.api [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661211, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.410313] env[65758]: DEBUG nova.compute.manager [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1107.410620] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1107.412012] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06aaa4ca-b699-459f-b477-6f37c9f0056f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.424396] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1107.424724] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80878d94-7e5a-42dd-a5af-babd1cd9b0d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.433282] env[65758]: DEBUG oslo_vmware.api [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1107.433282] env[65758]: value = "task-4661215" [ 1107.433282] env[65758]: _type = "Task" [ 1107.433282] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.444244] env[65758]: DEBUG oslo_vmware.api [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661215, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.456335] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661213, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.498071] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.498355] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.498781] env[65758]: DEBUG nova.objects.instance [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lazy-loading 'resources' on Instance uuid a014debf-2f16-4b30-af78-27a6751060de {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1107.512960] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661214, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.739659] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.739993] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.740196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.740395] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.740569] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 
tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.743031] env[65758]: INFO nova.compute.manager [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Terminating instance [ 1107.759760] env[65758]: DEBUG oslo_concurrency.lockutils [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] Releasing lock "refresh_cache-8f7c865d-1207-4300-b721-25b196f7a2f9" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.760052] env[65758]: DEBUG nova.compute.manager [req-3d9b2147-e82b-4c02-8b13-7f762b9ed00c req-6d140a72-ec51-4a95-9830-0980ac2e5142 service nova] [instance: a014debf-2f16-4b30-af78-27a6751060de] Received event network-vif-deleted-8fa0a611-f7a8-44d4-8921-988332d441bc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1107.786522] env[65758]: DEBUG oslo_vmware.api [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661211, 'name': ReconfigVM_Task, 'duration_secs': 1.174799} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.786833] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910049', 'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe', 'name': 'volume-da1ad087-09d2-4369-aa55-0371cd8a59fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ade1d760-e3e7-49c8-ba9d-b4829ca60841', 'attached_at': '', 'detached_at': '', 'volume_id': 'da1ad087-09d2-4369-aa55-0371cd8a59fe', 'serial': 'da1ad087-09d2-4369-aa55-0371cd8a59fe'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1107.943664] env[65758]: DEBUG oslo_vmware.api [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661215, 'name': PowerOffVM_Task, 'duration_secs': 0.205973} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.944501] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1107.944501] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1107.944714] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83610343-564f-46ba-b3b4-8d5616f3369b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.955705] env[65758]: DEBUG oslo_vmware.api [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661213, 'name': PowerOnVM_Task, 'duration_secs': 0.735505} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.955966] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1107.956192] env[65758]: INFO nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Took 8.08 seconds to spawn the instance on the hypervisor. [ 1107.956458] env[65758]: DEBUG nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1107.957242] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c90a6e-bdd8-4a19-aa06-ddfb12cb3554 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.012645] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661214, 'name': CreateVM_Task, 'duration_secs': 0.722713} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.012797] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.013292] env[65758]: WARNING neutronclient.v2_0.client [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1108.014160] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.014352] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.014657] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1108.015797] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b2ea7ce-3dc4-4f37-9a23-a22d79ae6595 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.021119] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1108.021337] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1108.021554] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleting the datastore file [datastore2] 76ec31e6-65c2-4290-9ec0-b274be95baa4 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1108.023133] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62a9a89c-9082-42f9-a844-c2aa2b49e654 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.025240] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1108.025240] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c9637e-ef60-1a75-8fb2-84ccc1e29e1d" [ 1108.025240] env[65758]: _type = "Task" [ 1108.025240] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.031640] env[65758]: DEBUG oslo_vmware.api [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1108.031640] env[65758]: value = "task-4661217" [ 1108.031640] env[65758]: _type = "Task" [ 1108.031640] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.042801] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c9637e-ef60-1a75-8fb2-84ccc1e29e1d, 'name': SearchDatastore_Task, 'duration_secs': 0.010563} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.043511] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.043775] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.044025] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.044195] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.044406] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.044693] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-719ff02e-2b1f-4627-af15-2d64911ffea5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.053496] env[65758]: WARNING neutronclient.v2_0.client [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1108.056377] env[65758]: DEBUG oslo_vmware.api [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.064399] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.064680] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1108.065621] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af54fa74-2783-4b53-91a7-8a966359ed43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.075929] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1108.075929] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5247b6ba-456d-d1da-ecef-90e76eb917f2" [ 1108.075929] env[65758]: _type = "Task" [ 1108.075929] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.088032] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5247b6ba-456d-d1da-ecef-90e76eb917f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.089066] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.089240] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.089426] env[65758]: DEBUG nova.network.neutron [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1108.201583] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edcab18-f094-4564-b7b8-410dc2b4bfea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.210144] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3c50af-96b0-4142-8937-c26629e99171 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.245482] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6f1a05-3cfa-4c56-a870-6236d4afbb2c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.248578] env[65758]: DEBUG nova.compute.manager [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1108.248824] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1108.249568] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efef0516-9610-4eeb-9b1d-9e87b7418c41 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.257604] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1108.259386] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7bb1b9b-fec1-4936-b961-6993e1cd55df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.262372] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bf7cbd-cc17-41b3-a7ad-cb03613e9d2d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.277060] env[65758]: DEBUG nova.compute.provider_tree [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1108.281259] env[65758]: DEBUG oslo_vmware.api [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1108.281259] env[65758]: value = "task-4661218" [ 1108.281259] env[65758]: _type = "Task" [ 1108.281259] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.294354] env[65758]: DEBUG oslo_vmware.api [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661218, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.341901] env[65758]: DEBUG nova.objects.instance [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid ade1d760-e3e7-49c8-ba9d-b4829ca60841 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.476981] env[65758]: INFO nova.compute.manager [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Took 16.27 seconds to build instance. [ 1108.545454] env[65758]: DEBUG oslo_vmware.api [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153769} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.545754] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1108.546088] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1108.546250] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1108.546444] env[65758]: INFO nova.compute.manager [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1108.546657] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1108.546858] env[65758]: DEBUG nova.compute.manager [-] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1108.546955] env[65758]: DEBUG nova.network.neutron [-] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1108.547216] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1108.547775] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1108.548050] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1108.587229] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5247b6ba-456d-d1da-ecef-90e76eb917f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010592} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.588408] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1108.591021] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc7ea5b1-bc6e-461d-abef-dd989eb64008 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.593564] env[65758]: WARNING neutronclient.v2_0.client [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
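The wait_for_task / _poll_task pairs above (for example task-4661217 and the SearchDatastore_Task session tasks) trace oslo.vmware's pattern of submitting a vCenter task object and then polling its state until it reaches a terminal value, logging "progress is N%" on each pass and "completed successfully" at the end. A minimal, self-contained sketch of that polling loop follows; get_task_info is a hypothetical callable standing in for the PropertyCollector reads the log shows, and the interval, timeout, and state names are illustrative, not the library's actual defaults.

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes; a sketch, not oslo.vmware's code.

    get_task_info is assumed to return a dict such as
    {'state': 'running', 'progress': 40}, {'state': 'success', ...} or
    {'state': 'error', 'error': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        state = info.get('state')
        if state == 'success':
            return info          # carries e.g. duration_secs / result value
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Queued or running: this is where the log prints lines like
        # "Task: {'id': task-4661217, ...} progress is 0%."
        time.sleep(poll_interval)
    raise TimeoutError('task did not complete within %.0fs' % timeout)
```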
[ 1108.594193] env[65758]: WARNING openstack [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1108.594605] env[65758]: WARNING openstack [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1108.605443] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1108.605443] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52829f52-41d1-fb28-d754-61491cef3d32" [ 1108.605443] env[65758]: _type = "Task" [ 1108.605443] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.617733] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52829f52-41d1-fb28-d754-61491cef3d32, 'name': SearchDatastore_Task} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.618433] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.618433] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 8f7c865d-1207-4300-b721-25b196f7a2f9/8f7c865d-1207-4300-b721-25b196f7a2f9.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1108.618659] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-202b9b51-d46d-47e2-8d3c-ca42f3afde53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.626959] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1108.626959] env[65758]: value = "task-4661219" [ 1108.626959] env[65758]: _type = "Task" [ 1108.626959] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.636385] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661219, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.793440] env[65758]: DEBUG oslo_vmware.api [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661218, 'name': PowerOffVM_Task, 'duration_secs': 0.427407} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.793440] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1108.793440] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1108.793834] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a90d8bf-e207-4f01-acaf-cfbe4c150a78 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.798997] env[65758]: ERROR nova.scheduler.client.report [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] [req-0df1dba5-a492-4f2e-9af7-4ebea3e36bac] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0df1dba5-a492-4f2e-9af7-4ebea3e36bac"}]} [ 1108.822050] env[65758]: DEBUG nova.scheduler.client.report [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1108.840577] env[65758]: DEBUG nova.scheduler.client.report [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1108.840794] env[65758]: DEBUG nova.compute.provider_tree [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1108.855326] env[65758]: DEBUG nova.scheduler.client.report [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: None {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1108.877127] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1108.877127] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1108.877127] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 
tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleting the datastore file [datastore2] ec1e2845-e73a-40ff-9b6c-1d8281859fba {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1108.877127] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7572c8d1-9040-4c69-ae89-117556e9a6c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.886488] env[65758]: DEBUG oslo_vmware.api [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for the task: (returnval){ [ 1108.886488] env[65758]: value = "task-4661221" [ 1108.886488] env[65758]: _type = "Task" [ 1108.886488] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.891406] env[65758]: DEBUG nova.scheduler.client.report [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1108.902338] env[65758]: DEBUG oslo_vmware.api [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661221, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.983291] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f0f0f555-599c-4d1f-b32e-8954946b6fcc tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "62ae50af-ff52-4084-8161-1a650eff5247" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.782s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.120646] env[65758]: WARNING neutronclient.v2_0.client [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
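The ERROR from nova.scheduler.client.report above, with its 409 placement.concurrent_update response and the immediate "Refreshing inventories" that follows, is Placement's optimistic concurrency control at work: every inventory write must carry the resource provider generation, and a write with a stale generation is rejected so the caller re-reads the provider and retries (the retry succeeds later in this log, reporting generation 136). The sketch below shows that read-modify-retry loop against the Placement REST API using requests; the endpoint URL and token are placeholders, and the real retry logic in nova's report client handles more cases.

```python
import requests

PLACEMENT_URL = 'http://placement.example.test/placement'  # placeholder
HEADERS = {
    'X-Auth-Token': 'ADMIN_TOKEN',                          # placeholder
    'OpenStack-API-Version': 'placement 1.26',
    'Accept': 'application/json',
}

def set_inventory(rp_uuid, inventories, max_attempts=4):
    """PUT inventory with the provider's current generation; on a 409
    concurrent-update conflict, re-read the generation and retry."""
    for _ in range(max_attempts):
        rp = requests.get(f'{PLACEMENT_URL}/resource_providers/{rp_uuid}',
                          headers=HEADERS)
        rp.raise_for_status()
        payload = {
            'resource_provider_generation': rp.json()['generation'],
            'inventories': inventories,
        }
        resp = requests.put(
            f'{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories',
            json=payload, headers=HEADERS)
        if resp.ok:
            return resp.json()   # includes the new provider generation
        if resp.status_code != 409:
            resp.raise_for_status()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first (as seen in the log); refresh and try again.
    raise RuntimeError('inventory update kept hitting generation conflicts')
```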
[ 1109.121404] env[65758]: WARNING openstack [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1109.122009] env[65758]: WARNING openstack [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1109.138468] env[65758]: DEBUG nova.compute.manager [req-46590fe5-725d-4e6e-99ff-ef2805a182cc req-77890716-1376-48fa-b5ae-1739c4616c90 service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Received event network-vif-deleted-2e41907c-1553-48df-9644-cb422d2f19df {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1109.139066] env[65758]: INFO nova.compute.manager [req-46590fe5-725d-4e6e-99ff-ef2805a182cc req-77890716-1376-48fa-b5ae-1739c4616c90 service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Neutron deleted interface 2e41907c-1553-48df-9644-cb422d2f19df; detaching it from the instance and deleting it from the info cache [ 1109.139066] env[65758]: DEBUG nova.network.neutron [req-46590fe5-725d-4e6e-99ff-ef2805a182cc req-77890716-1376-48fa-b5ae-1739c4616c90 service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1109.150266] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502703} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.150266] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 8f7c865d-1207-4300-b721-25b196f7a2f9/8f7c865d-1207-4300-b721-25b196f7a2f9.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1109.150266] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1109.150266] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8cad12c-9e21-4ca6-bed1-88dd160b7f70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.159073] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1109.159073] env[65758]: value = "task-4661222" [ 1109.159073] env[65758]: _type = "Task" [ 1109.159073] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.165300] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0245289-e11c-4907-9c43-cc805054d334 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.171955] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661222, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.177990] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe3d64e-4d0c-4ea2-a00a-66cbaa33fbcf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.220638] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd58980-ecbe-4c8a-a139-87b379eb3edd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.230715] env[65758]: DEBUG nova.network.neutron [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance_info_cache with network_info: [{"id": "05e0fa46-1b67-477a-bc40-26c9641f6549", "address": "fa:16:3e:6d:7e:f5", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05e0fa46-1b", "ovs_interfaceid": "05e0fa46-1b67-477a-bc40-26c9641f6549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1109.235326] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6d050d-f52a-442e-ab95-b05904bb941b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.251019] env[65758]: DEBUG nova.compute.provider_tree [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1109.350033] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3377e638-fd1a-4607-be4f-72ee48e14b8f tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock 
"ade1d760-e3e7-49c8-ba9d-b4829ca60841" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.308s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.397384] env[65758]: DEBUG oslo_vmware.api [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Task: {'id': task-4661221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.360116} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.397647] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1109.397837] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1109.398012] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1109.398187] env[65758]: INFO nova.compute.manager [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1109.398430] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1109.398632] env[65758]: DEBUG nova.compute.manager [-] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1109.398771] env[65758]: DEBUG nova.network.neutron [-] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1109.398985] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1109.399550] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1109.399807] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1109.437453] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1109.553662] env[65758]: DEBUG nova.compute.manager [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1109.553883] env[65758]: DEBUG nova.compute.manager [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing instance network info cache due to event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1109.554132] env[65758]: DEBUG oslo_concurrency.lockutils [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.554412] env[65758]: DEBUG oslo_concurrency.lockutils [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.555137] env[65758]: DEBUG nova.network.neutron [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1109.597894] env[65758]: DEBUG nova.network.neutron [-] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1109.599560] env[65758]: DEBUG oslo_concurrency.lockutils [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.599692] env[65758]: DEBUG oslo_concurrency.lockutils [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" 
acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.643225] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67162b58-09e6-4752-a89d-2ba130af3fde {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.655291] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70937eb1-49b7-45b5-a933-a75505130744 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.684879] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661222, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072197} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.684879] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1109.685990] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6add8171-19cf-4de3-b78b-c62cee26e36c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.702815] env[65758]: DEBUG nova.compute.manager [req-46590fe5-725d-4e6e-99ff-ef2805a182cc req-77890716-1376-48fa-b5ae-1739c4616c90 service nova] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Detach interface failed, port_id=2e41907c-1553-48df-9644-cb422d2f19df, reason: Instance 76ec31e6-65c2-4290-9ec0-b274be95baa4 could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1109.727903] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 8f7c865d-1207-4300-b721-25b196f7a2f9/8f7c865d-1207-4300-b721-25b196f7a2f9.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1109.728636] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b451300-7b2e-4009-bad0-1f887b96db49 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.744843] env[65758]: DEBUG oslo_concurrency.lockutils [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.761727] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1109.761727] env[65758]: value = "task-4661223" [ 1109.761727] env[65758]: _type = "Task" [ 1109.761727] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.771258] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661223, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.796804] env[65758]: DEBUG nova.scheduler.client.report [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 136 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1109.797263] env[65758]: DEBUG nova.compute.provider_tree [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 136 to 137 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1109.797460] env[65758]: DEBUG nova.compute.provider_tree [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1110.057887] env[65758]: WARNING neutronclient.v2_0.client [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1110.058647] env[65758]: WARNING openstack [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1110.059240] env[65758]: WARNING openstack [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1110.103026] env[65758]: INFO nova.compute.manager [-] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Took 1.56 seconds to deallocate network for instance. 
[ 1110.104773] env[65758]: INFO nova.compute.manager [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Detaching volume 521fcd20-a69b-4128-829d-bb436cf0d10d [ 1110.152100] env[65758]: INFO nova.virt.block_device [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Attempting to driver detach volume 521fcd20-a69b-4128-829d-bb436cf0d10d from mountpoint /dev/sdc [ 1110.152320] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Volume detach. Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1110.152426] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910051', 'volume_id': '521fcd20-a69b-4128-829d-bb436cf0d10d', 'name': 'volume-521fcd20-a69b-4128-829d-bb436cf0d10d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ade1d760-e3e7-49c8-ba9d-b4829ca60841', 'attached_at': '', 'detached_at': '', 'volume_id': '521fcd20-a69b-4128-829d-bb436cf0d10d', 'serial': '521fcd20-a69b-4128-829d-bb436cf0d10d'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1110.153416] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d400dc-0d5e-4a06-8e68-84942d6c9058 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.177510] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71e8871-9355-4dd9-9964-1492535cc72c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.181959] env[65758]: DEBUG nova.network.neutron [-] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1110.188669] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4483e71-ab63-4a0c-b408-66a08a628ad3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.211081] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f09b469-aec8-40ca-a4ad-5ea8a390fb4d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.233261] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-521fcd20-a69b-4128-829d-bb436cf0d10d/volume-521fcd20-a69b-4128-829d-bb436cf0d10d.vmdk. No consolidation needed. {{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1110.238583] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfiguring VM instance instance-0000005f to detach disk 2002 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1110.238973] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77d0d4d6-d9e8-462c-bf9d-68dc5bf5b116 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.253069] env[65758]: WARNING neutronclient.v2_0.client [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1110.253774] env[65758]: WARNING openstack [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1110.254147] env[65758]: WARNING openstack [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1110.270214] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6e2935-dbd2-4973-b626-2a94ee3e122c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.275425] env[65758]: DEBUG oslo_vmware.api [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1110.275425] env[65758]: value = "task-4661224" [ 1110.275425] env[65758]: _type = "Task" [ 1110.275425] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.284111] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661223, 'name': ReconfigVM_Task, 'duration_secs': 0.297113} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.285282] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c3976e-38d0-430f-91de-cfe751fdb600 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.288048] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 8f7c865d-1207-4300-b721-25b196f7a2f9/8f7c865d-1207-4300-b721-25b196f7a2f9.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1110.292441] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a3ce933-c50c-4f30-9660-3fa27e1e15f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.294148] env[65758]: DEBUG oslo_vmware.api [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661224, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.302212] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1110.302212] env[65758]: value = "task-4661225" [ 1110.302212] env[65758]: _type = "Task" [ 1110.302212] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.309523] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.811s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.327138] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661225, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.350727] env[65758]: INFO nova.scheduler.client.report [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Deleted allocations for instance a014debf-2f16-4b30-af78-27a6751060de [ 1110.381705] env[65758]: DEBUG nova.network.neutron [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updated VIF entry in instance network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1110.381705] env[65758]: DEBUG nova.network.neutron [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1110.500581] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.500823] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.611956] env[65758]: DEBUG oslo_concurrency.lockutils [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.612443] env[65758]: DEBUG oslo_concurrency.lockutils [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.612894] env[65758]: DEBUG oslo_concurrency.lockutils 
[None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.644098] env[65758]: INFO nova.scheduler.client.report [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted allocations for instance 76ec31e6-65c2-4290-9ec0-b274be95baa4 [ 1110.684635] env[65758]: INFO nova.compute.manager [-] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Took 1.29 seconds to deallocate network for instance. [ 1110.786511] env[65758]: DEBUG oslo_vmware.api [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661224, 'name': ReconfigVM_Task, 'duration_secs': 0.257736} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.786810] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Reconfigured VM instance instance-0000005f to detach disk 2002 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1110.791763] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54aec695-8a11-417f-97fc-f96e77e0f559 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.810603] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661225, 'name': Rename_Task, 'duration_secs': 0.185493} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.812101] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1110.812369] env[65758]: DEBUG oslo_vmware.api [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1110.812369] env[65758]: value = "task-4661226" [ 1110.812369] env[65758]: _type = "Task" [ 1110.812369] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.812578] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9f88e4a-c555-447b-9f33-1b21ef7f7dca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.824153] env[65758]: DEBUG oslo_vmware.api [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661226, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.825509] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1110.825509] env[65758]: value = "task-4661227" [ 1110.825509] env[65758]: _type = "Task" [ 1110.825509] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.834113] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661227, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.859842] env[65758]: DEBUG oslo_concurrency.lockutils [None req-747a7894-fcad-4e1f-b243-53bf07e7c086 tempest-ImagesOneServerTestJSON-1385457576 tempest-ImagesOneServerTestJSON-1385457576-project-member] Lock "a014debf-2f16-4b30-af78-27a6751060de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.902s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.883322] env[65758]: DEBUG oslo_concurrency.lockutils [req-df04d9d2-50f4-4406-be52-bb970b90439b req-6c7b4064-c677-43b4-a24b-2904d7e4e6da service nova] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.003477] env[65758]: DEBUG nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1111.152810] env[65758]: DEBUG oslo_concurrency.lockutils [None req-88634e4d-3827-4294-9706-f51fbf99b733 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "76ec31e6-65c2-4290-9ec0-b274be95baa4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.251s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.192654] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.192989] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.193229] env[65758]: DEBUG nova.objects.instance [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lazy-loading 'resources' on Instance uuid ec1e2845-e73a-40ff-9b6c-1d8281859fba {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.325877] env[65758]: DEBUG oslo_vmware.api [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661226, 'name': ReconfigVM_Task, 'duration_secs': 0.156843} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.325877] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910051', 'volume_id': '521fcd20-a69b-4128-829d-bb436cf0d10d', 'name': 'volume-521fcd20-a69b-4128-829d-bb436cf0d10d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ade1d760-e3e7-49c8-ba9d-b4829ca60841', 'attached_at': '', 'detached_at': '', 'volume_id': '521fcd20-a69b-4128-829d-bb436cf0d10d', 'serial': '521fcd20-a69b-4128-829d-bb436cf0d10d'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1111.338163] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661227, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.433198] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8414fd-54c3-439a-929c-73d42bcef48a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.455952] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e2b5ad-f8e3-4cb3-86bf-5810db460c36 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.464900] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance '149655f8-fcf5-4cfe-ab96-1171b9d3b550' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1111.526151] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.851032] env[65758]: DEBUG oslo_vmware.api [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661227, 'name': PowerOnVM_Task, 'duration_secs': 0.516523} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.851032] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1111.851032] env[65758]: INFO nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Took 7.33 seconds to spawn the instance on the hypervisor. 
[ 1111.851032] env[65758]: DEBUG nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1111.851032] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca67f987-3cb2-4d5f-b816-48f947c7ec00 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.883414] env[65758]: DEBUG nova.objects.instance [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'flavor' on Instance uuid ade1d760-e3e7-49c8-ba9d-b4829ca60841 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.890076] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c256d81f-7553-42fa-b7f4-da63f4f0d14c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.898259] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad1e62b-3136-4e10-9bd8-4386a4a014c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.934451] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d2e40f-7d27-4738-92de-44255997ae4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.944582] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e398502-4da2-4cfc-a2c7-d4cae5e452bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.950182] env[65758]: DEBUG nova.compute.manager [req-69c90f3c-6e5a-4fca-8898-f002a5b90635 req-b7d00dc3-5b0a-48bb-a2a5-2b81295da38b service nova] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Received event network-vif-deleted-09744327-3d1a-4d1b-8f3c-8532ca1fef21 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1111.962182] env[65758]: DEBUG nova.compute.provider_tree [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.971918] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.972824] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-029a7de3-8306-40c3-b467-e97ac084090b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.981664] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 
tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1111.981664] env[65758]: value = "task-4661228" [ 1111.981664] env[65758]: _type = "Task" [ 1111.981664] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.991864] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661228, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.139416] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.139711] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.374588] env[65758]: INFO nova.compute.manager [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Took 17.83 seconds to build instance. [ 1112.465490] env[65758]: DEBUG nova.scheduler.client.report [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.495009] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661228, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.642120] env[65758]: DEBUG nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1112.744117] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73dc8f1-2c3b-4b9f-989b-ad352738553e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.751109] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-50876fda-edd7-40de-9cc8-dbd2b3b1ffa5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Suspending the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1112.751840] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9aa10e06-a142-42ba-920b-70d5ace118df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.759537] env[65758]: DEBUG oslo_vmware.api [None req-50876fda-edd7-40de-9cc8-dbd2b3b1ffa5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1112.759537] env[65758]: value = "task-4661229" [ 1112.759537] env[65758]: _type = "Task" [ 1112.759537] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.769218] env[65758]: DEBUG oslo_vmware.api [None req-50876fda-edd7-40de-9cc8-dbd2b3b1ffa5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661229, 'name': SuspendVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.876815] env[65758]: DEBUG oslo_concurrency.lockutils [None req-eee83fe6-32b1-4e4f-a6f1-10ff29a19139 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "8f7c865d-1207-4300-b721-25b196f7a2f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.340s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.889570] env[65758]: DEBUG oslo_concurrency.lockutils [None req-039fdd0a-8b86-4b3b-87bd-86de4fd7a647 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.290s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.971935] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.778s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.975318] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.449s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.977829] env[65758]: INFO nova.compute.claims [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1112.996206] env[65758]: DEBUG oslo_vmware.api [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661228, 'name': PowerOnVM_Task, 'duration_secs': 0.752625} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.996289] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1112.996574] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-38b70b34-56f3-4787-af66-49f36d19a150 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance '149655f8-fcf5-4cfe-ab96-1171b9d3b550' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1113.003265] env[65758]: INFO nova.scheduler.client.report [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Deleted allocations for instance ec1e2845-e73a-40ff-9b6c-1d8281859fba [ 1113.174044] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.274490] env[65758]: DEBUG oslo_vmware.api [None req-50876fda-edd7-40de-9cc8-dbd2b3b1ffa5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661229, 'name': SuspendVM_Task} progress is 62%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.516383] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98aa736-07a0-4b01-ba8b-2822d29684e1 tempest-ServerRescueNegativeTestJSON-1164963118 tempest-ServerRescueNegativeTestJSON-1164963118-project-member] Lock "ec1e2845-e73a-40ff-9b6c-1d8281859fba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.776s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.773318] env[65758]: DEBUG oslo_vmware.api [None req-50876fda-edd7-40de-9cc8-dbd2b3b1ffa5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661229, 'name': SuspendVM_Task, 'duration_secs': 0.681267} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.773609] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-50876fda-edd7-40de-9cc8-dbd2b3b1ffa5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Suspended the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1113.773937] env[65758]: DEBUG nova.compute.manager [None req-50876fda-edd7-40de-9cc8-dbd2b3b1ffa5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1113.774825] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af10256-8d23-42bf-be59-34901f52890b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.101369] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.102430] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.102430] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.102555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.102666] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.104954] env[65758]: INFO nova.compute.manager [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: 
ade1d760-e3e7-49c8-ba9d-b4829ca60841] Terminating instance [ 1114.173114] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50464270-64b3-456d-b19d-74b1b08dd2c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.181991] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6e63f9-3a96-4ae4-bf34-91cced3b63f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.214249] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6c2d71-2b58-400f-a21e-2f4157f3b456 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.222157] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81c6340-0e40-44fa-849e-f8471b8841ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.236521] env[65758]: DEBUG nova.compute.provider_tree [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.610317] env[65758]: DEBUG nova.compute.manager [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1114.610571] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.612243] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49197b54-5713-4791-9e36-230cfc9d4660 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.623092] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.623372] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2434a4a-33f5-4a2c-b9bd-704fb0bd1e8b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.631659] env[65758]: DEBUG oslo_vmware.api [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1114.631659] env[65758]: value = "task-4661230" [ 1114.631659] env[65758]: _type = "Task" [ 1114.631659] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.648258] env[65758]: DEBUG oslo_vmware.api [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.740658] env[65758]: DEBUG nova.scheduler.client.report [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1115.143253] env[65758]: DEBUG oslo_vmware.api [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661230, 'name': PowerOffVM_Task, 'duration_secs': 0.195792} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.143691] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.143761] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.145062] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95585284-c069-4651-97eb-19b7882ca34f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.219483] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.219797] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1115.219887] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Deleting the datastore file 
[datastore2] ade1d760-e3e7-49c8-ba9d-b4829ca60841 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.220667] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-122a102d-b47d-47e1-9ee4-f86bf5607249 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.228670] env[65758]: DEBUG oslo_vmware.api [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for the task: (returnval){ [ 1115.228670] env[65758]: value = "task-4661232" [ 1115.228670] env[65758]: _type = "Task" [ 1115.228670] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.238727] env[65758]: DEBUG oslo_vmware.api [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661232, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.245830] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.246448] env[65758]: DEBUG nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1115.249611] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.076s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.251512] env[65758]: INFO nova.compute.claims [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1115.311479] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "8f7c865d-1207-4300-b721-25b196f7a2f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.312645] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "8f7c865d-1207-4300-b721-25b196f7a2f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.312645] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "8f7c865d-1207-4300-b721-25b196f7a2f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.312645] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "8f7c865d-1207-4300-b721-25b196f7a2f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.312645] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "8f7c865d-1207-4300-b721-25b196f7a2f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.314860] env[65758]: INFO nova.compute.manager [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Terminating instance [ 1115.740754] env[65758]: DEBUG oslo_vmware.api [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 
tempest-AttachVolumeTestJSON-627906013-project-member] Task: {'id': task-4661232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153968} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.741070] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.741272] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1115.741450] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1115.741628] env[65758]: INFO nova.compute.manager [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1115.741899] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1115.742124] env[65758]: DEBUG nova.compute.manager [-] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1115.742271] env[65758]: DEBUG nova.network.neutron [-] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1115.742537] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1115.743130] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1115.743360] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1115.757072] env[65758]: DEBUG nova.compute.utils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1115.758726] env[65758]: DEBUG nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1115.759623] env[65758]: DEBUG nova.network.neutron [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1115.759623] env[65758]: WARNING neutronclient.v2_0.client [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1115.759623] env[65758]: WARNING neutronclient.v2_0.client [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1115.760289] env[65758]: WARNING openstack [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1115.760530] env[65758]: WARNING openstack [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1115.792227] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1115.818962] env[65758]: DEBUG nova.policy [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1115.821420] env[65758]: DEBUG nova.compute.manager [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1115.821631] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1115.822821] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70c0689-0230-40ba-8d9b-b2b6b8624d45 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.831370] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.831606] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.831784] env[65758]: DEBUG nova.compute.manager [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Going to confirm migration 7 {{(pid=65758) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5290}} [ 1115.833150] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.833601] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84a8a90b-fea6-4dcc-8ec5-1116a5c966d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.914234] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.914234] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1115.914549] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleting the 
datastore file [datastore2] 8f7c865d-1207-4300-b721-25b196f7a2f9 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.914887] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac50196b-d56e-471b-815c-035c4585e0d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.923812] env[65758]: DEBUG oslo_vmware.api [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1115.923812] env[65758]: value = "task-4661234" [ 1115.923812] env[65758]: _type = "Task" [ 1115.923812] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.935809] env[65758]: DEBUG oslo_vmware.api [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.271390] env[65758]: DEBUG nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1116.289248] env[65758]: DEBUG nova.network.neutron [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Successfully created port: 924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1116.338988] env[65758]: WARNING neutronclient.v2_0.client [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1116.405383] env[65758]: DEBUG nova.compute.manager [req-a528d964-f467-4702-aa74-e17e916d164a req-74e5ec79-3920-41bd-848f-d65ab8402f99 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Received event network-vif-deleted-40ae9fbf-7f23-48e1-bd47-7de2b62ace7e {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1116.405650] env[65758]: INFO nova.compute.manager [req-a528d964-f467-4702-aa74-e17e916d164a req-74e5ec79-3920-41bd-848f-d65ab8402f99 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Neutron deleted interface 40ae9fbf-7f23-48e1-bd47-7de2b62ace7e; detaching it from the instance and deleting it from the info cache [ 1116.405827] env[65758]: DEBUG nova.network.neutron [req-a528d964-f467-4702-aa74-e17e916d164a req-74e5ec79-3920-41bd-848f-d65ab8402f99 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1116.440438] env[65758]: DEBUG oslo_vmware.api [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239721} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.440438] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1116.440438] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1116.440438] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1116.440623] env[65758]: INFO nova.compute.manager [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1116.440967] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1116.441223] env[65758]: DEBUG nova.compute.manager [-] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1116.441347] env[65758]: DEBUG nova.network.neutron [-] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1116.441687] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1116.442402] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1116.442643] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1116.474188] env[65758]: WARNING neutronclient.v2_0.client [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1116.474606] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.474759] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquired lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.474923] env[65758]: DEBUG nova.network.neutron [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1116.475214] env[65758]: DEBUG nova.objects.instance [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'info_cache' on Instance uuid 149655f8-fcf5-4cfe-ab96-1171b9d3b550 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.528333] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1116.546699] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e527b9ed-b549-46bf-ac7f-ee398a7be5de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.555799] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06efde4d-30a7-40d6-b833-ad6bfcabf09d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.591305] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4722272f-e1e0-419f-aad6-7b15312ac57f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.599995] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1032062-c0ea-4473-a331-f2b6d0407a34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.616265] env[65758]: DEBUG nova.compute.provider_tree [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.843022] env[65758]: DEBUG nova.network.neutron [-] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1116.875848] env[65758]: DEBUG nova.compute.manager [req-a540e485-b909-4eae-8d1f-b8e41424fd84 req-13f659af-1869-4eac-8ccc-f0bfb2de1347 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Received event network-vif-deleted-eca37d97-ed85-4bcf-b389-e161b7507b5b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1116.876074] env[65758]: INFO nova.compute.manager [req-a540e485-b909-4eae-8d1f-b8e41424fd84 req-13f659af-1869-4eac-8ccc-f0bfb2de1347 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Neutron deleted interface eca37d97-ed85-4bcf-b389-e161b7507b5b; detaching it from the instance and deleting it from the info cache [ 1116.876240] env[65758]: DEBUG nova.network.neutron [req-a540e485-b909-4eae-8d1f-b8e41424fd84 req-13f659af-1869-4eac-8ccc-f0bfb2de1347 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1116.914613] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-037a7d02-19a0-4316-b4bd-25dc9354fc4f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.924963] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da1dda8-1083-4fe0-98eb-16146f012756 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.957666] env[65758]: DEBUG nova.compute.manager [req-a528d964-f467-4702-aa74-e17e916d164a req-74e5ec79-3920-41bd-848f-d65ab8402f99 service nova] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Detach interface failed, 
port_id=40ae9fbf-7f23-48e1-bd47-7de2b62ace7e, reason: Instance ade1d760-e3e7-49c8-ba9d-b4829ca60841 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1117.119752] env[65758]: DEBUG nova.scheduler.client.report [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.245986] env[65758]: DEBUG nova.network.neutron [-] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1117.285273] env[65758]: DEBUG nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1117.314988] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1117.314988] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1117.314988] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1117.314988] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 
{{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1117.315265] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1117.315412] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1117.315677] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1117.316155] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1117.316155] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1117.316310] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1117.316554] env[65758]: DEBUG nova.virt.hardware [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1117.317469] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627da2d0-8cf6-410e-aa5b-1a7bc9933022 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.327244] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0006755b-5db2-43fd-9567-ad301f3c050a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.346431] env[65758]: INFO nova.compute.manager [-] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Took 1.60 seconds to deallocate network for instance. 
[ 1117.378668] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60cf6de2-f944-4261-8c9f-e671db1ea5e5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.389746] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68925eb6-8523-44c0-806f-316ceb7424e8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.422269] env[65758]: DEBUG nova.compute.manager [req-a540e485-b909-4eae-8d1f-b8e41424fd84 req-13f659af-1869-4eac-8ccc-f0bfb2de1347 service nova] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Detach interface failed, port_id=eca37d97-ed85-4bcf-b389-e161b7507b5b, reason: Instance 8f7c865d-1207-4300-b721-25b196f7a2f9 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1117.482061] env[65758]: WARNING neutronclient.v2_0.client [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1117.483365] env[65758]: WARNING openstack [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1117.484199] env[65758]: WARNING openstack [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1117.626278] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.626979] env[65758]: DEBUG nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1117.666024] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "a81095fb-6fe8-4b24-b763-1da983978460" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.666254] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "a81095fb-6fe8-4b24-b763-1da983978460" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.675592] env[65758]: WARNING neutronclient.v2_0.client [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1117.676311] env[65758]: WARNING openstack [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1117.676671] env[65758]: WARNING openstack [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1117.751181] env[65758]: INFO nova.compute.manager [-] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Took 1.31 seconds to deallocate network for instance. 
[ 1117.772503] env[65758]: DEBUG nova.network.neutron [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance_info_cache with network_info: [{"id": "05e0fa46-1b67-477a-bc40-26c9641f6549", "address": "fa:16:3e:6d:7e:f5", "network": {"id": "7bb66c9e-ab1e-4bb1-9b58-929677c55efb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2115206624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e114eef3998848699a9a086fee86db29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05e0fa46-1b", "ovs_interfaceid": "05e0fa46-1b67-477a-bc40-26c9641f6549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1117.854395] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.854764] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.854962] env[65758]: DEBUG nova.objects.instance [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lazy-loading 'resources' on Instance uuid ade1d760-e3e7-49c8-ba9d-b4829ca60841 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.972128] env[65758]: DEBUG nova.network.neutron [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Successfully updated port: 924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1118.133225] env[65758]: DEBUG nova.compute.utils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1118.134785] 
env[65758]: DEBUG nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1118.134992] env[65758]: DEBUG nova.network.neutron [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1118.135329] env[65758]: WARNING neutronclient.v2_0.client [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1118.135637] env[65758]: WARNING neutronclient.v2_0.client [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1118.136757] env[65758]: WARNING openstack [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1118.137134] env[65758]: WARNING openstack [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1118.144785] env[65758]: DEBUG nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1118.168151] env[65758]: DEBUG nova.compute.manager [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1118.191478] env[65758]: DEBUG nova.policy [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91358f51732f44198a020f6669168408', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4095654557a34bb0907071aedb3bb678', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1118.258725] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.274884] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Releasing lock "refresh_cache-149655f8-fcf5-4cfe-ab96-1171b9d3b550" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.275174] env[65758]: DEBUG nova.objects.instance [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'migration_context' on Instance uuid 149655f8-fcf5-4cfe-ab96-1171b9d3b550 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1118.444466] env[65758]: DEBUG nova.compute.manager [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received event network-vif-plugged-924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1118.444708] env[65758]: DEBUG oslo_concurrency.lockutils [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] Acquiring lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.444865] env[65758]: DEBUG oslo_concurrency.lockutils [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.445034] env[65758]: DEBUG oslo_concurrency.lockutils [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.445198] env[65758]: DEBUG nova.compute.manager [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] No waiting events found dispatching network-vif-plugged-924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1118.445355] env[65758]: WARNING nova.compute.manager [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received unexpected event network-vif-plugged-924f7463-7e8c-4f58-af04-46082cd691ed for instance with vm_state building and task_state spawning. [ 1118.445539] env[65758]: DEBUG nova.compute.manager [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received event network-changed-924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1118.445698] env[65758]: DEBUG nova.compute.manager [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing instance network info cache due to event network-changed-924f7463-7e8c-4f58-af04-46082cd691ed. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1118.445872] env[65758]: DEBUG oslo_concurrency.lockutils [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] Acquiring lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.445997] env[65758]: DEBUG oslo_concurrency.lockutils [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] Acquired lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.446159] env[65758]: DEBUG nova.network.neutron [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing network info cache for port 924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1118.474074] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.510578] env[65758]: DEBUG nova.network.neutron [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Successfully created port: b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1118.527323] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a83f5b-9497-4d34-aa50-724413ec6c87 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.536676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9992bb39-33c9-4533-97b1-fc20f8e1da94 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.567868] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54520a53-3326-4365-837b-e30bc18acaa0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.576535] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526daa99-f80d-4f27-ab60-b88992d5c201 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.592982] env[65758]: DEBUG nova.compute.provider_tree [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.687289] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.778230] env[65758]: DEBUG nova.objects.base [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Object Instance<149655f8-fcf5-4cfe-ab96-1171b9d3b550> lazy-loaded attributes: info_cache,migration_context {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1118.779285] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89eff4bb-5be1-4300-9672-9b12f388b3d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.801553] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-593212d1-0b8c-4bbb-86d3-5d7ad76a7eb0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.807617] env[65758]: DEBUG oslo_vmware.api [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1118.807617] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525d4f3c-0a06-4682-d2cc-7f51d63620c5" [ 1118.807617] env[65758]: _type = "Task" [ 1118.807617] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.816935] env[65758]: DEBUG oslo_vmware.api [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525d4f3c-0a06-4682-d2cc-7f51d63620c5, 'name': SearchDatastore_Task, 'duration_secs': 0.006962} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.817222] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.949229] env[65758]: WARNING neutronclient.v2_0.client [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1118.949968] env[65758]: WARNING openstack [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1118.950374] env[65758]: WARNING openstack [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1118.993567] env[65758]: DEBUG nova.network.neutron [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1119.095158] env[65758]: DEBUG nova.scheduler.client.report [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1119.137401] env[65758]: DEBUG nova.network.neutron [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1119.154392] env[65758]: DEBUG nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1119.184502] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1119.184792] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1119.184946] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1119.185172] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1119.185284] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1119.185430] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1119.185617] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1119.185771] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1119.185935] env[65758]: DEBUG 
nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1119.186107] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1119.186285] env[65758]: DEBUG nova.virt.hardware [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1119.187533] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5308158a-352f-47b4-bdc1-93c25bd5dc61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.196861] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b1b999-e835-478a-bea7-953a5d3bf049 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.600825] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.746s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.603013] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.345s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.603254] env[65758]: DEBUG nova.objects.instance [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'resources' on Instance uuid 8f7c865d-1207-4300-b721-25b196f7a2f9 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.625977] env[65758]: INFO nova.scheduler.client.report [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Deleted allocations for instance ade1d760-e3e7-49c8-ba9d-b4829ca60841 [ 1119.639854] env[65758]: DEBUG oslo_concurrency.lockutils [req-3e2c981f-c77c-49bb-ac86-ef14c4dc18c2 req-3f547007-1812-471b-9edb-316138075025 service nova] Releasing lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.640276] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 
tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.640456] env[65758]: DEBUG nova.network.neutron [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1120.023677] env[65758]: DEBUG nova.network.neutron [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Successfully updated port: b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1120.133492] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e1b0608e-47f7-4c72-866b-568619e78740 tempest-AttachVolumeTestJSON-627906013 tempest-AttachVolumeTestJSON-627906013-project-member] Lock "ade1d760-e3e7-49c8-ba9d-b4829ca60841" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.031s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.143045] env[65758]: WARNING openstack [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1120.143527] env[65758]: WARNING openstack [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1120.182372] env[65758]: DEBUG nova.network.neutron [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1120.255471] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0e65a6-ce50-4eb9-9e75-e530e8704c25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.264158] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d6bccd-ee05-4f0a-ae71-5b7aa71c3c02 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.298987] env[65758]: WARNING neutronclient.v2_0.client [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1120.298987] env[65758]: WARNING openstack [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1120.298987] env[65758]: WARNING openstack [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1120.307535] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52df71a0-ac4e-4256-9c34-cd480a481b66 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.317397] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acadcf06-bdd7-4196-a7a3-f6023afacb8e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.332222] env[65758]: DEBUG nova.compute.provider_tree [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.397413] env[65758]: DEBUG nova.network.neutron [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [{"id": "924f7463-7e8c-4f58-af04-46082cd691ed", "address": "fa:16:3e:33:06:f7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924f7463-7e", "ovs_interfaceid": "924f7463-7e8c-4f58-af04-46082cd691ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1120.475352] env[65758]: DEBUG nova.compute.manager [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Received event network-vif-plugged-b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1120.475698] env[65758]: DEBUG oslo_concurrency.lockutils [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] Acquiring lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.475865] env[65758]: DEBUG oslo_concurrency.lockutils [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.475990] env[65758]: DEBUG oslo_concurrency.lockutils [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.476167] env[65758]: DEBUG nova.compute.manager [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] No waiting events found dispatching network-vif-plugged-b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1120.476329] env[65758]: WARNING nova.compute.manager [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Received unexpected event network-vif-plugged-b91df992-11ae-4d37-af24-380860864b45 for instance with vm_state building and task_state spawning. 
[ 1120.476481] env[65758]: DEBUG nova.compute.manager [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Received event network-changed-b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1120.476699] env[65758]: DEBUG nova.compute.manager [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Refreshing instance network info cache due to event network-changed-b91df992-11ae-4d37-af24-380860864b45. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1120.476788] env[65758]: DEBUG oslo_concurrency.lockutils [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] Acquiring lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.476930] env[65758]: DEBUG oslo_concurrency.lockutils [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] Acquired lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.477074] env[65758]: DEBUG nova.network.neutron [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Refreshing network info cache for port b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1120.526483] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.836136] env[65758]: DEBUG nova.scheduler.client.report [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.900332] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.900774] env[65758]: DEBUG nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Instance 
network_info: |[{"id": "924f7463-7e8c-4f58-af04-46082cd691ed", "address": "fa:16:3e:33:06:f7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924f7463-7e", "ovs_interfaceid": "924f7463-7e8c-4f58-af04-46082cd691ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1120.901325] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:06:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '924f7463-7e8c-4f58-af04-46082cd691ed', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1120.909768] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1120.910065] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1120.910572] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33887a23-d18c-4fea-96b0-a789f739ced0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.931707] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1120.931707] env[65758]: value = "task-4661235" [ 1120.931707] env[65758]: _type = "Task" [ 1120.931707] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.941301] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661235, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.980817] env[65758]: WARNING neutronclient.v2_0.client [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1120.981523] env[65758]: WARNING openstack [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1120.981898] env[65758]: WARNING openstack [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1121.031216] env[65758]: DEBUG nova.network.neutron [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1121.127929] env[65758]: DEBUG nova.network.neutron [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1121.341754] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.739s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.344240] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.657s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.346200] env[65758]: INFO nova.compute.claims [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1121.367526] env[65758]: INFO nova.scheduler.client.report [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted allocations for instance 8f7c865d-1207-4300-b721-25b196f7a2f9 [ 1121.445289] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661235, 'name': CreateVM_Task, 'duration_secs': 0.363606} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.445499] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1121.446009] env[65758]: WARNING neutronclient.v2_0.client [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1121.446376] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.446561] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.447050] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1121.447331] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec196f89-374b-4e33-9a2a-5022df46616b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.452561] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1121.452561] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527773bb-ece7-c782-2f59-8f3a06128dbf" [ 1121.452561] env[65758]: _type = "Task" [ 1121.452561] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.461961] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527773bb-ece7-c782-2f59-8f3a06128dbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.630597] env[65758]: DEBUG oslo_concurrency.lockutils [req-e6d1442e-2941-41d5-ad89-9166f52be2f6 req-15122689-bd49-4715-94ea-447fc11fc479 service nova] Releasing lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.631029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.631235] env[65758]: DEBUG nova.network.neutron [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1121.878704] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73e033f8-5f23-4ecc-9c87-fcd406cf86d5 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "8f7c865d-1207-4300-b721-25b196f7a2f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.567s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.965105] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527773bb-ece7-c782-2f59-8f3a06128dbf, 'name': SearchDatastore_Task, 'duration_secs': 0.011542} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.965434] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.965670] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1121.965900] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.966053] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.966281] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1121.966570] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-973c87b3-250a-45ef-b4ad-c72bb3a25aba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.977513] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1121.977513] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1121.978331] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73009098-7ca4-414c-8e64-6d9b43bd6dfc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.984483] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1121.984483] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528ea3ed-c659-fc72-82f4-6d7e546b0327" [ 1121.984483] env[65758]: _type = "Task" [ 1121.984483] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.993338] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528ea3ed-c659-fc72-82f4-6d7e546b0327, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.136097] env[65758]: WARNING openstack [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1122.136097] env[65758]: WARNING openstack [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1122.171478] env[65758]: DEBUG nova.network.neutron [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1122.249190] env[65758]: WARNING neutronclient.v2_0.client [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1122.249850] env[65758]: WARNING openstack [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1122.250241] env[65758]: WARNING openstack [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1122.337704] env[65758]: DEBUG nova.network.neutron [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [{"id": "b91df992-11ae-4d37-af24-380860864b45", "address": "fa:16:3e:13:ee:a2", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb91df992-11", "ovs_interfaceid": "b91df992-11ae-4d37-af24-380860864b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1122.497914] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528ea3ed-c659-fc72-82f4-6d7e546b0327, 'name': SearchDatastore_Task, 'duration_secs': 0.010463} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.502431] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52a12c50-9a56-40e5-a725-e5881667d353 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.510068] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1122.510068] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523d04db-79df-84cd-1756-aee18cfc468b" [ 1122.510068] env[65758]: _type = "Task" [ 1122.510068] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.514788] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f194fe1-5961-40bf-a21b-3ca72f2549b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.524643] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523d04db-79df-84cd-1756-aee18cfc468b, 'name': SearchDatastore_Task, 'duration_secs': 0.011152} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.527192] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.527504] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 8d0419d1-c301-4302-80c1-cd0fce7ccba4/8d0419d1-c301-4302-80c1-cd0fce7ccba4.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1122.527866] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33dffe9a-cb50-4818-bdcf-b1cb09b53481 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.531109] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89d7034-cb7b-4640-901f-b68d4f5e9065 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.569185] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf3c247-a132-4461-b4a3-8114583f2730 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.571962] env[65758]: DEBUG oslo_vmware.api [None 
req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1122.571962] env[65758]: value = "task-4661237" [ 1122.571962] env[65758]: _type = "Task" [ 1122.571962] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.579218] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eab2683-78dd-4ef0-8653-94ff996ef4a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.586334] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.599036] env[65758]: DEBUG nova.compute.provider_tree [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.840883] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.841399] env[65758]: DEBUG nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Instance network_info: |[{"id": "b91df992-11ae-4d37-af24-380860864b45", "address": "fa:16:3e:13:ee:a2", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb91df992-11", "ovs_interfaceid": "b91df992-11ae-4d37-af24-380860864b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1122.841948] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 
tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:ee:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '438671d0-9468-4e44-84c1-4c0ebaa743e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b91df992-11ae-4d37-af24-380860864b45', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1122.851375] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1122.851658] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1122.851948] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d1073ac-1198-49d2-9d13-8ec95265f089 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.879559] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.879960] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.887123] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1122.887123] env[65758]: value = "task-4661238" [ 1122.887123] env[65758]: _type = "Task" [ 1122.887123] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.898771] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661238, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.083144] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522592} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.083466] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 8d0419d1-c301-4302-80c1-cd0fce7ccba4/8d0419d1-c301-4302-80c1-cd0fce7ccba4.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1123.083696] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1123.083997] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4b99dcc-2ebf-45d8-98c6-491c5b320005 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.091243] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1123.091243] env[65758]: value = "task-4661239" [ 1123.091243] env[65758]: _type = "Task" [ 1123.091243] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.101581] env[65758]: DEBUG nova.scheduler.client.report [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1123.106106] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661239, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.382797] env[65758]: DEBUG nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1123.397704] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661238, 'name': CreateVM_Task, 'duration_secs': 0.388373} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.397928] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1123.398488] env[65758]: WARNING neutronclient.v2_0.client [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1123.398919] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.399152] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.399548] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1123.399808] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0df21d3-592e-4a83-a19f-e428e5f049f2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.406057] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1123.406057] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522693d4-b20f-47b0-a166-7733b36677a6" [ 1123.406057] env[65758]: _type = "Task" [ 1123.406057] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.415870] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522693d4-b20f-47b0-a166-7733b36677a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.537038] env[65758]: DEBUG nova.compute.manager [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1123.602013] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661239, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06699} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.602392] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1123.603219] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622bfa0c-838d-49cc-9d73-ea9667a59010 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.607051] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.607750] env[65758]: DEBUG nova.compute.manager [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1123.611192] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.794s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.634483] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 8d0419d1-c301-4302-80c1-cd0fce7ccba4/8d0419d1-c301-4302-80c1-cd0fce7ccba4.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1123.635679] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f10bc253-43d2-400e-96f8-6d236bb48305 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.658252] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1123.658252] env[65758]: value = "task-4661240" [ 1123.658252] env[65758]: _type = "Task" [ 1123.658252] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.667490] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661240, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.906134] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.916873] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522693d4-b20f-47b0-a166-7733b36677a6, 'name': SearchDatastore_Task, 'duration_secs': 0.010618} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.917163] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.917401] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1123.917715] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.917885] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.918080] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1123.918358] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6f7b100-f72d-4887-b3e0-9bd0a6d71c17 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.928392] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1123.928566] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1123.929355] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7e5e417-fd1c-46fc-a929-3a609d3896a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.937718] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1123.937718] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52877ac0-ccd9-1b1a-98e7-739a9842e5af" [ 1123.937718] env[65758]: _type = "Task" [ 1123.937718] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.946109] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52877ac0-ccd9-1b1a-98e7-739a9842e5af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.058776] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.115999] env[65758]: DEBUG nova.compute.utils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1124.120790] env[65758]: DEBUG nova.compute.manager [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Not allocating networking since 'none' was specified. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 1124.169755] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661240, 'name': ReconfigVM_Task, 'duration_secs': 0.281261} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.173519] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 8d0419d1-c301-4302-80c1-cd0fce7ccba4/8d0419d1-c301-4302-80c1-cd0fce7ccba4.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1124.174386] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebf996ea-22e2-4d98-a239-d2d4282b35f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.182089] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1124.182089] env[65758]: value = "task-4661242" [ 1124.182089] env[65758]: _type = "Task" [ 1124.182089] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.196417] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661242, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.298111] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab6bbc5-0b34-4cc7-95d5-8e95b26a984b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.307385] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc50b0e-6da4-43d5-8162-8e7135adf20d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.339217] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90dedb4-f2e3-4b67-8a8a-11c0083253d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.347542] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55236e58-62ef-4c62-b67d-4e5917289e95 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.363363] env[65758]: DEBUG nova.compute.provider_tree [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1124.448586] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52877ac0-ccd9-1b1a-98e7-739a9842e5af, 'name': SearchDatastore_Task, 'duration_secs': 0.012384} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.449420] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d6f9575-4790-4fc4-ad52-aedf425e64a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.455940] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1124.455940] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f29168-7912-5796-d054-0622f306f675" [ 1124.455940] env[65758]: _type = "Task" [ 1124.455940] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.464896] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f29168-7912-5796-d054-0622f306f675, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.621666] env[65758]: DEBUG nova.compute.manager [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1124.692528] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661242, 'name': Rename_Task, 'duration_secs': 0.158878} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.692817] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1124.693091] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c589b7d-0feb-49da-a632-2f06b3fe5b43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.701540] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1124.701540] env[65758]: value = "task-4661243" [ 1124.701540] env[65758]: _type = "Task" [ 1124.701540] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.711485] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661243, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.888154] env[65758]: ERROR nova.scheduler.client.report [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [req-a969f857-253f-4e27-a36e-d0acbb8ea948] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a969f857-253f-4e27-a36e-d0acbb8ea948"}]} [ 1124.904725] env[65758]: DEBUG nova.scheduler.client.report [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1124.920462] env[65758]: DEBUG nova.scheduler.client.report [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1124.920910] env[65758]: DEBUG nova.compute.provider_tree [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1124.933394] env[65758]: DEBUG nova.scheduler.client.report [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: None {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1124.952377] env[65758]: DEBUG nova.scheduler.client.report [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1124.967113] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f29168-7912-5796-d054-0622f306f675, 'name': SearchDatastore_Task, 'duration_secs': 0.009875} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.967391] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.967651] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d/ba16e0fe-6748-4d14-bb28-a65d63a2274d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1124.968021] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0cdd3e22-5576-465a-b807-4cfd153806f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.979207] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1124.979207] env[65758]: value = "task-4661244" [ 1124.979207] env[65758]: _type = "Task" [ 1124.979207] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.988291] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661244, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.122040] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16f314d-cfe4-4916-923a-184d94b8037b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.135063] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131e81f9-bf65-4b37-9884-1a791306db0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.170786] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967c8db2-1cb2-439c-a670-16b9c15d7787 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.180465] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c86c45c-2ba5-4b21-8570-c6e8073c6313 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.197650] env[65758]: DEBUG nova.compute.provider_tree [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1125.212625] env[65758]: DEBUG oslo_vmware.api [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661243, 'name': PowerOnVM_Task, 'duration_secs': 0.489567} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.213050] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1125.213127] env[65758]: INFO nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Took 7.93 seconds to spawn the instance on the hypervisor. 
[ 1125.213425] env[65758]: DEBUG nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1125.214276] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4c76d8-90bd-4424-9943-283abae5230c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.490694] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661244, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.631960] env[65758]: DEBUG nova.compute.manager [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1125.661184] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1125.661448] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.661605] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1125.661787] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.661919] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 
tempest-ServersListShow2100Test-129593015-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1125.662070] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1125.662284] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1125.662461] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1125.662638] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1125.662801] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1125.662971] env[65758]: DEBUG nova.virt.hardware [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1125.663881] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd192c7-005b-466b-96cb-3e97688ce074 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.673354] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d40b04-e1dd-4db3-a432-20b62ec169ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.688447] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1125.694073] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Creating folder: Project (e2986a435f054610b71845a04bfdea6a). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1125.694448] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-798e217d-645c-4cbb-8533-d0dad2e8fee4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.707803] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Created folder: Project (e2986a435f054610b71845a04bfdea6a) in parent group-v909763. [ 1125.708096] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Creating folder: Instances. Parent ref: group-v910056. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1125.708393] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23fe5b49-695c-4bad-bf1f-e0af9c03412d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.719949] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Created folder: Instances in parent group-v910056. [ 1125.720289] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1125.720525] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1125.720765] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28aa7890-814b-47b7-a1a0-ed67aea0c21b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.739439] env[65758]: DEBUG nova.scheduler.client.report [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 143 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1125.739541] env[65758]: DEBUG nova.compute.provider_tree [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 143 to 144 during operation: update_inventory {{(pid=65758) _update_generation 
/opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1125.739735] env[65758]: DEBUG nova.compute.provider_tree [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1125.745506] env[65758]: INFO nova.compute.manager [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Took 14.24 seconds to build instance. [ 1125.749511] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1125.749511] env[65758]: value = "task-4661247" [ 1125.749511] env[65758]: _type = "Task" [ 1125.749511] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.761342] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661247, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.991027] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661244, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531741} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.991446] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d/ba16e0fe-6748-4d14-bb28-a65d63a2274d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1125.991535] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1125.991760] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a630097-e97d-419e-bb0f-03595318cd0e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.999054] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1125.999054] env[65758]: value = "task-4661248" [ 1125.999054] env[65758]: _type = "Task" [ 1125.999054] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.008628] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661248, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.251137] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9d149b24-6076-49d5-b37f-7504b8694b43 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.750s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.263344] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661247, 'name': CreateVM_Task} progress is 25%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.478737] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.478931] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Cleaning up deleted instances with incomplete migration {{(pid=65758) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11954}} [ 1126.510151] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072797} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.510347] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1126.511159] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a2ce25-be34-45e5-84ce-e9fe2e7dc1aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.537539] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d/ba16e0fe-6748-4d14-bb28-a65d63a2274d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1126.537997] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b431ac1c-5bcb-43ae-99bc-b6ab5e4378c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.561816] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1126.561816] env[65758]: value = "task-4661249" [ 1126.561816] env[65758]: _type = "Task" [ 1126.561816] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.573144] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661249, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.733810] env[65758]: DEBUG nova.compute.manager [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1126.733810] env[65758]: DEBUG nova.compute.manager [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing instance network info cache due to event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1126.736121] env[65758]: DEBUG oslo_concurrency.lockutils [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.736121] env[65758]: DEBUG oslo_concurrency.lockutils [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.736121] env[65758]: DEBUG nova.network.neutron [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1126.751671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.140s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.754776] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.849s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.756724] env[65758]: INFO nova.compute.claims [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.770625] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661247, 'name': CreateVM_Task, 'duration_secs': 0.742482} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.771022] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1126.771427] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.771582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.771975] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1126.772194] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60973d10-ede4-4523-bff2-451fbda8e0a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.778148] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1126.778148] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52891dea-86ff-658a-295b-1e0eef3ff85d" [ 1126.778148] env[65758]: _type = "Task" [ 1126.778148] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.789264] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52891dea-86ff-658a-295b-1e0eef3ff85d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.073942] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661249, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.238745] env[65758]: WARNING neutronclient.v2_0.client [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1127.239559] env[65758]: WARNING openstack [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1127.239978] env[65758]: WARNING openstack [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1127.292301] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52891dea-86ff-658a-295b-1e0eef3ff85d, 'name': SearchDatastore_Task, 'duration_secs': 0.011353} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.297276] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.297554] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1127.297810] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.297982] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.298197] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1127.299222] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c02939b2-c0da-4e51-b78c-1bd9136f7c92 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.309476] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1127.310618] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1127.310618] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f13492f-3615-4e52-8a6b-9ec08cd9d22f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.317290] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1127.317290] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b99260-1655-f04e-ed26-9236dbb3ed40" [ 1127.317290] env[65758]: _type = "Task" [ 1127.317290] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.327974] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b99260-1655-f04e-ed26-9236dbb3ed40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.342764] env[65758]: INFO nova.scheduler.client.report [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted allocation for migration 0d160ead-0c7c-422d-976f-8988ce7ea93d [ 1127.414864] env[65758]: WARNING neutronclient.v2_0.client [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1127.415605] env[65758]: WARNING openstack [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1127.417012] env[65758]: WARNING openstack [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1127.539913] env[65758]: DEBUG nova.network.neutron [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updated VIF entry in instance network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1127.540298] env[65758]: DEBUG nova.network.neutron [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1127.574353] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661249, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.678732] env[65758]: INFO nova.compute.manager [None req-da02ff27-8cb5-4634-a029-a3048e0e0542 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Get console output [ 1127.678732] env[65758]: WARNING nova.virt.vmwareapi.driver [None req-da02ff27-8cb5-4634-a029-a3048e0e0542 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] The console log is missing. Check your VSPC configuration [ 1127.832489] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b99260-1655-f04e-ed26-9236dbb3ed40, 'name': SearchDatastore_Task, 'duration_secs': 0.010276} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.833362] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e77e9a08-68cf-4161-8009-580d49e8d4f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.839981] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1127.839981] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521e297e-0e60-4e82-b180-972da18e830b" [ 1127.839981] env[65758]: _type = "Task" [ 1127.839981] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.849365] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86af1f5e-ec81-483e-82b1-1f378b3aedaa tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.018s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.850478] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521e297e-0e60-4e82-b180-972da18e830b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.943787] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66513b2-98f7-4dcb-99b2-68921b78e120 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.953087] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e56771-fabd-4557-bd0f-666c5dd5ac69 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.986309] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2146b959-cbb8-40d6-82dc-8b329e1dfad2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.997539] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a573f0-1df2-40d4-9e35-51ab7d97a99d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.018177] env[65758]: DEBUG nova.compute.provider_tree [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.043401] env[65758]: DEBUG oslo_concurrency.lockutils [req-49b16b4f-c92e-4e9c-a59c-824de9168850 req-d590dcb2-7752-43d8-a3de-16e2e0db3f2d service nova] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.074289] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661249, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.351748] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521e297e-0e60-4e82-b180-972da18e830b, 'name': SearchDatastore_Task, 'duration_secs': 0.011006} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.352077] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.352397] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a81095fb-6fe8-4b24-b763-1da983978460/a81095fb-6fe8-4b24-b763-1da983978460.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1128.352680] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abe5f6c3-da25-436c-88de-b929a03e81e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.361572] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1128.361572] env[65758]: value = "task-4661251" [ 1128.361572] env[65758]: _type = "Task" [ 1128.361572] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.372788] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661251, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.522646] env[65758]: DEBUG nova.scheduler.client.report [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.574946] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661249, 'name': ReconfigVM_Task, 'duration_secs': 1.612318} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.575216] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d/ba16e0fe-6748-4d14-bb28-a65d63a2274d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1128.575976] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e5ca7a3-2eab-4803-b1df-d78c674c15d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.584474] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1128.584474] env[65758]: value = "task-4661252" [ 1128.584474] env[65758]: _type = "Task" [ 1128.584474] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.593855] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661252, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.767975] env[65758]: DEBUG nova.compute.manager [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received event network-changed-924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1128.768533] env[65758]: DEBUG nova.compute.manager [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing instance network info cache due to event network-changed-924f7463-7e8c-4f58-af04-46082cd691ed. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1128.768832] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Acquiring lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.769059] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Acquired lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.769476] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing network info cache for port 924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1128.872440] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488031} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.872754] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] a81095fb-6fe8-4b24-b763-1da983978460/a81095fb-6fe8-4b24-b763-1da983978460.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1128.872939] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1128.873239] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55f09eea-b3d0-4da5-a036-b9c5a45a9570 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.880957] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1128.880957] env[65758]: value = "task-4661253" [ 1128.880957] env[65758]: _type = "Task" [ 1128.880957] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.891896] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661253, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.982086] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.982494] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.028379] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.028943] env[65758]: DEBUG nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1129.032287] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.974s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.094602] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661252, 'name': Rename_Task, 'duration_secs': 0.165385} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.095037] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.095139] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2f79d93-af7a-4b95-85a1-29b6c7ce9ee3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.102903] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1129.102903] env[65758]: value = "task-4661254" [ 1129.102903] env[65758]: _type = "Task" [ 1129.102903] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.111864] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661254, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.271741] env[65758]: WARNING neutronclient.v2_0.client [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1129.272493] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.272848] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.391501] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067347} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.391832] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1129.392728] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962e5afc-948e-407b-a213-a02d4a39b258 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.418160] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] a81095fb-6fe8-4b24-b763-1da983978460/a81095fb-6fe8-4b24-b763-1da983978460.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1129.418391] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6a60f9d-8a1f-4f1c-bb46-6d8853d2ed50 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.433975] env[65758]: WARNING neutronclient.v2_0.client [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1129.434721] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.435115] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.452193] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1129.452193] env[65758]: value = "task-4661255" [ 1129.452193] env[65758]: _type = "Task" [ 1129.452193] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.461687] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661255, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.489434] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.529102] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updated VIF entry in instance network info cache for port 924f7463-7e8c-4f58-af04-46082cd691ed. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1129.529481] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [{"id": "924f7463-7e8c-4f58-af04-46082cd691ed", "address": "fa:16:3e:33:06:f7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924f7463-7e", "ovs_interfaceid": "924f7463-7e8c-4f58-af04-46082cd691ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1129.536669] env[65758]: DEBUG nova.compute.utils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1129.540583] env[65758]: INFO nova.compute.claims [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1129.545670] env[65758]: DEBUG nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1129.545868] env[65758]: DEBUG nova.network.neutron [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1129.546210] env[65758]: WARNING neutronclient.v2_0.client [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1129.546541] env[65758]: WARNING neutronclient.v2_0.client [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1129.547183] env[65758]: WARNING openstack [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1129.547535] env[65758]: WARNING openstack [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1129.592609] env[65758]: DEBUG nova.policy [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd07b5ba2c3ef430293fbf39148961763', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bad3e3c7054c424a800cb12e9c5dbb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1129.614642] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661254, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.880512] env[65758]: DEBUG nova.network.neutron [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Successfully created port: e06e56fe-7299-46f2-9238-9f1351c4ce06 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1129.962192] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661255, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.032237] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Releasing lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.032787] env[65758]: DEBUG nova.compute.manager [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received event network-changed-924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1130.032924] env[65758]: DEBUG nova.compute.manager [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing instance network info cache due to event network-changed-924f7463-7e8c-4f58-af04-46082cd691ed. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1130.033157] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Acquiring lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.033277] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Acquired lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.033434] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing network info cache for port 924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1130.046853] env[65758]: INFO nova.compute.resource_tracker [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating resource usage from migration 5faf2f1a-3a2e-4e55-88e6-d1fd72448b69 [ 1130.050242] env[65758]: DEBUG nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1130.114192] env[65758]: DEBUG oslo_vmware.api [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661254, 'name': PowerOnVM_Task, 'duration_secs': 0.540768} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.114531] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.114746] env[65758]: INFO nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Took 10.96 seconds to spawn the instance on the hypervisor. 
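The CopyVirtualDisk_Task / Rename_Task / PowerOnVM_Task entries above all follow the same pattern: the driver submits a vCenter task, then wait_for_task polls it until it reports success or error, emitting the intermediate "progress is N%" lines and the final duration_secs. Below is a minimal illustrative sketch of that polling loop only; it is not the oslo.vmware implementation, and the get_task_info callable, the TaskFailed exception and the 0.5 s interval are assumed names for illustration.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; the real code uses a looping call


    class TaskFailed(Exception):
        """Raised when the backend reports the task as errored."""


    def wait_for_task(get_task_info, task_ref, timeout=300):
        """Poll a vCenter-style task until it succeeds, fails, or times out.

        ``get_task_info`` is a hypothetical callable returning an object with
        ``state`` ('queued', 'running', 'success', 'error'), ``progress`` and
        ``error`` attributes, standing in for a PropertyCollector lookup.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info                   # caller logs the task duration
            if info.state == 'error':
                raise TaskFailed(info.error)  # surfaces as an instance build failure
            # 'queued'/'running': this is where the "progress is N%" lines come from
            time.sleep(POLL_INTERVAL)
        raise TimeoutError('task %s did not complete in %ss' % (task_ref, timeout))
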
[ 1130.114988] env[65758]: DEBUG nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1130.115715] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed68219-f480-403a-ae98-995186b55b6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.208142] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d274ba33-6ae9-4cca-b9ee-a67a417627ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.215972] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2faaa4-4b5d-42ba-8a6d-01f4ef9c2a67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.246633] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0abed1-5f88-4896-9db3-33d733477a68 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.256013] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3eb7a86-dcce-4906-a78f-57cc88e8e10e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.272880] env[65758]: DEBUG nova.compute.provider_tree [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1130.463799] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661255, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.536753] env[65758]: WARNING neutronclient.v2_0.client [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
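The recurring WARNING openstack "Disabling service ...: no such option valid_interfaces in group [cinder]" / "[barbican]" lines (several appear above and immediately below) are openstacksdk reading the Nova configuration and requesting an option that was never registered in those groups; oslo.config raises NoSuchOptError and the SDK disables that service entry instead of failing. A small sketch of the underlying oslo.config behaviour, using a made-up option set rather than Nova's real one:

    from oslo_config import cfg

    conf = cfg.ConfigOpts()

    # Register only one option in the [cinder] group (illustrative, not Nova's set).
    conf.register_opts([cfg.StrOpt('catalog_info')], group='cinder')
    conf([], project='demo')

    print(conf.cinder.catalog_info)   # registered option -> returns its value (None here)

    try:
        conf.cinder.valid_interfaces  # never registered in this group
    except cfg.NoSuchOptError as exc:
        # This is the exception text echoed in the WARNING lines in the log.
        print('would disable the service: %s' % exc)
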
[ 1130.537504] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1130.537920] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1130.634575] env[65758]: INFO nova.compute.manager [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Took 17.49 seconds to build instance. [ 1130.798690] env[65758]: ERROR nova.scheduler.client.report [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [req-f592bba6-1e30-4115-916e-94765a9a79fb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f592bba6-1e30-4115-916e-94765a9a79fb"}]} [ 1130.817038] env[65758]: DEBUG nova.scheduler.client.report [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1130.832354] env[65758]: DEBUG nova.scheduler.client.report [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1130.832591] env[65758]: DEBUG nova.compute.provider_tree [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1130.844792] env[65758]: DEBUG nova.scheduler.client.report [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: None {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1130.867157] env[65758]: DEBUG nova.scheduler.client.report [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1130.965923] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661255, 'name': ReconfigVM_Task, 'duration_secs': 1.102294} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.967690] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Reconfigured VM instance instance-00000069 to attach disk [datastore1] a81095fb-6fe8-4b24-b763-1da983978460/a81095fb-6fe8-4b24-b763-1da983978460.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1130.967690] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b885856-9c3d-4884-8bee-691c22af0746 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.975063] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1130.975063] env[65758]: value = "task-4661256" [ 1130.975063] env[65758]: _type = "Task" [ 1130.975063] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.986448] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661256, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.038480] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393aa891-0e0e-4214-a7d3-e6c3e0d12e0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.048208] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b585e4-d9a9-4b17-9f5d-437622f0e61b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.081907] env[65758]: DEBUG nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1131.085049] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bf1513-6a65-436e-91f4-39232679691b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.095009] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667342ed-401e-4ceb-851b-938a6258f25d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.111340] env[65758]: DEBUG nova.compute.provider_tree [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1131.122272] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1131.122579] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1131.122698] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1131.122879] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1131.123033] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 
tempest-DeleteServersTestJSON-1083168368-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1131.123178] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1131.123380] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1131.123533] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1131.123695] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1131.123846] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1131.124024] env[65758]: DEBUG nova.virt.hardware [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1131.124877] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517e1864-b00e-4f1c-b775-dc903d4709de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.133601] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117fb69a-9a18-42d4-ad17-b36c74dc47d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.137984] env[65758]: DEBUG oslo_concurrency.lockutils [None req-44da86d6-daf3-4b3f-9125-57442fc7f502 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.998s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.348374] env[65758]: DEBUG nova.compute.manager [req-cc15b5be-e673-4447-99d5-d9576f302b88 req-1eb11df4-3339-4493-a1cc-78942ff519af service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Received event 
network-vif-plugged-e06e56fe-7299-46f2-9238-9f1351c4ce06 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1131.348594] env[65758]: DEBUG oslo_concurrency.lockutils [req-cc15b5be-e673-4447-99d5-d9576f302b88 req-1eb11df4-3339-4493-a1cc-78942ff519af service nova] Acquiring lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.348897] env[65758]: DEBUG oslo_concurrency.lockutils [req-cc15b5be-e673-4447-99d5-d9576f302b88 req-1eb11df4-3339-4493-a1cc-78942ff519af service nova] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.349103] env[65758]: DEBUG oslo_concurrency.lockutils [req-cc15b5be-e673-4447-99d5-d9576f302b88 req-1eb11df4-3339-4493-a1cc-78942ff519af service nova] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.349273] env[65758]: DEBUG nova.compute.manager [req-cc15b5be-e673-4447-99d5-d9576f302b88 req-1eb11df4-3339-4493-a1cc-78942ff519af service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] No waiting events found dispatching network-vif-plugged-e06e56fe-7299-46f2-9238-9f1351c4ce06 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1131.349931] env[65758]: WARNING nova.compute.manager [req-cc15b5be-e673-4447-99d5-d9576f302b88 req-1eb11df4-3339-4493-a1cc-78942ff519af service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Received unexpected event network-vif-plugged-e06e56fe-7299-46f2-9238-9f1351c4ce06 for instance with vm_state building and task_state spawning. [ 1131.418899] env[65758]: DEBUG nova.network.neutron [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Successfully updated port: e06e56fe-7299-46f2-9238-9f1351c4ce06 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1131.487135] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661256, 'name': Rename_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.649076] env[65758]: DEBUG nova.scheduler.client.report [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1131.649365] env[65758]: DEBUG nova.compute.provider_tree [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 145 to 146 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1131.649682] env[65758]: DEBUG nova.compute.provider_tree [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1131.922358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.922358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.922597] env[65758]: DEBUG nova.network.neutron [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1131.967273] env[65758]: WARNING neutronclient.v2_0.client [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
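The earlier ERROR from nova.scheduler.client.report (HTTP 409 with code placement.concurrent_update), followed by the inventory refresh and the provider generation moving from 145 to 146 just above, is Placement's optimistic concurrency control: every inventory PUT carries the resource provider generation, a stale generation is rejected with 409, and the client re-reads the provider and retries. A simplified sketch of that retry loop, assuming direct access to the Placement REST API via python-requests, with the endpoint, microversion and token handling stubbed out:

    import requests

    PLACEMENT = 'http://placement.example.test/placement'   # assumed endpoint
    HEADERS = {'OpenStack-API-Version': 'placement 1.39'}   # auth token omitted here


    def set_inventory(session, rp_uuid, inventories, max_retries=3):
        """PUT an inventory payload, refreshing the provider generation on 409."""
        url = '%s/resource_providers/%s/inventories' % (PLACEMENT, rp_uuid)
        for _ in range(max_retries):
            current = session.get(url, headers=HEADERS).json()
            payload = {
                'resource_provider_generation': current['resource_provider_generation'],
                'inventories': inventories,
            }
            resp = session.put(url, json=payload, headers=HEADERS)
            if resp.status_code == 200:
                return resp.json()
            if resp.status_code == 409:   # placement.concurrent_update: stale generation
                continue                  # re-read the generation and try again
            resp.raise_for_status()
        raise RuntimeError('could not update inventory for %s' % rp_uuid)
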
[ 1131.967947] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1131.968385] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1131.989590] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661256, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.102430] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updated VIF entry in instance network info cache for port 924f7463-7e8c-4f58-af04-46082cd691ed. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1132.102785] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [{"id": "924f7463-7e8c-4f58-af04-46082cd691ed", "address": "fa:16:3e:33:06:f7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924f7463-7e", "ovs_interfaceid": "924f7463-7e8c-4f58-af04-46082cd691ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1132.155429] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.123s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.155721] 
env[65758]: INFO nova.compute.manager [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Migrating [ 1132.162454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.673s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.162671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.162827] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1132.166608] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dfc7d5-5b46-49a1-a258-f07c62f6754a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.180381] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcd45ef-40ee-4f61-bf03-e6003aa0fca9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.199728] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60cb8a5-da21-4f0e-82f9-3757909ccf81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.209281] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a982754-847e-44a3-9cf9-ec55f01c63d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.241449] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178854MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1132.241628] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.241868] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.426241] env[65758]: WARNING openstack [None req-86a3943e-70e0-4747-9d96-788fec653037 
tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1132.427214] env[65758]: WARNING openstack [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1132.465671] env[65758]: DEBUG nova.network.neutron [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1132.488363] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661256, 'name': Rename_Task, 'duration_secs': 1.154092} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.491285] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1132.491751] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfb5acc3-833a-4cee-95c7-da545ed904e5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.499744] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1132.499744] env[65758]: value = "task-4661257" [ 1132.499744] env[65758]: _type = "Task" [ 1132.499744] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.509197] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661257, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.552171] env[65758]: WARNING neutronclient.v2_0.client [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
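The Acquiring/Acquired/Releasing lock "refresh_cache-<instance uuid>" entries around this point are Nova serialising rebuilds of an instance's network info cache through oslo.concurrency, which is also what produces the "waited N s" / "held N s" figures in the log. A minimal sketch of the same locking pattern using the public lockutils helpers; the function body and lock names are placeholders, not Nova's code:

    from oslo_concurrency import lockutils

    # Nova-style prefixed decorator; 'nova-' matches the convention, the rest is assumed.
    synchronized = lockutils.synchronized_with_prefix('nova-')


    @synchronized('refresh_cache-demo-instance-uuid')
    def refresh_network_info_cache(instance_uuid):
        """Only one caller at a time rebuilds the cache for this instance."""
        return {'instance': instance_uuid, 'ports': []}  # placeholder body


    # Context-manager form, comparable to the image-cache lock seen earlier in the log.
    with lockutils.lock('devstack-image-cache_base/demo.vmdk'):
        pass  # critical section: copy/extend the cached image here
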
[ 1132.553626] env[65758]: WARNING openstack [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1132.553626] env[65758]: WARNING openstack [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1132.608422] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Releasing lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.608956] env[65758]: DEBUG nova.compute.manager [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1132.609305] env[65758]: DEBUG nova.compute.manager [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing instance network info cache due to event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1132.609584] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.609764] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.609939] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1132.650575] env[65758]: DEBUG nova.network.neutron [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance_info_cache with network_info: [{"id": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "address": "fa:16:3e:12:8a:30", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06e56fe-72", "ovs_interfaceid": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1132.676038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.676038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.676038] env[65758]: DEBUG nova.network.neutron [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 
tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1133.014321] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661257, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.113926] env[65758]: WARNING neutronclient.v2_0.client [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1133.114032] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1133.114531] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1133.154550] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.155057] env[65758]: DEBUG nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Instance network_info: |[{"id": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "address": "fa:16:3e:12:8a:30", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06e56fe-72", "ovs_interfaceid": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1133.158338] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:8a:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e06e56fe-7299-46f2-9238-9f1351c4ce06', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1133.166860] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1133.167995] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1133.167995] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd4ec5a2-3598-4fe7-a634-32f9c7f5357a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.183987] env[65758]: WARNING neutronclient.v2_0.client [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1133.184707] env[65758]: WARNING openstack [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1133.185073] env[65758]: WARNING openstack [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1133.199156] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.199156] env[65758]: value = "task-4661258" [ 1133.199156] env[65758]: _type = "Task" [ 1133.199156] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.208277] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661258, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.253063] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Applying migration context for instance 47cebd84-f9a1-4997-96aa-c76c5faa8c81 as it has an incoming, in-progress migration 5faf2f1a-3a2e-4e55-88e6-d1fd72448b69. Migration status is pre-migrating {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1133.255371] env[65758]: INFO nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating resource usage from migration 5faf2f1a-3a2e-4e55-88e6-d1fd72448b69 [ 1133.280273] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 37aadd44-79e8-4479-862f-265549c9d802 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.280273] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 31b7d1ee-58c1-47f3-a862-0bc5cb17addc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.280461] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance afc1eb16-c275-4b3b-a7fe-9938d2241e24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.280685] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 149655f8-fcf5-4cfe-ab96-1171b9d3b550 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.281632] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 62ae50af-ff52-4084-8161-1a650eff5247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.281632] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 8d0419d1-c301-4302-80c1-cd0fce7ccba4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.281632] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ba16e0fe-6748-4d14-bb28-a65d63a2274d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.281632] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a81095fb-6fe8-4b24-b763-1da983978460 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.281632] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance b5bbff6b-42e9-4938-b4b3-05a9d5826d1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.281632] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Migration 5faf2f1a-3a2e-4e55-88e6-d1fd72448b69 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 1133.281890] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 47cebd84-f9a1-4997-96aa-c76c5faa8c81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1133.282020] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1133.282270] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2752MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '10', 'num_vm_active': '8', 'num_task_None': '7', 'num_os_type_None': '10', 'num_proj_e114eef3998848699a9a086fee86db29': '3', 'io_workload': '3', 'num_proj_e2440f1694fe4b87a9827f6653ff2e4c': '1', 'num_task_resize_prep': '1', 'num_proj_c4c2ab2b80c04c38bfb4c7cafac87fe6': '1', 'num_proj_64ffccae76ed401582dd915ae5f87922': '2', 'num_proj_4095654557a34bb0907071aedb3bb678': '1', 'num_vm_building': '2', 'num_task_spawning': '2', 'num_proj_e2986a435f054610b71845a04bfdea6a': '1', 'num_proj_bad3e3c7054c424a800cb12e9c5dbb31': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1133.325980] env[65758]: WARNING neutronclient.v2_0.client [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1133.326679] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1133.327102] env[65758]: WARNING openstack [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1133.371309] env[65758]: WARNING neutronclient.v2_0.client [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1133.372100] env[65758]: WARNING openstack [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1133.372649] env[65758]: WARNING openstack [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1133.397418] env[65758]: DEBUG nova.compute.manager [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Received event network-changed-e06e56fe-7299-46f2-9238-9f1351c4ce06 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1133.397418] env[65758]: DEBUG nova.compute.manager [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Refreshing instance network info cache due to event network-changed-e06e56fe-7299-46f2-9238-9f1351c4ce06. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1133.397531] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Acquiring lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.397604] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Acquired lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.397770] env[65758]: DEBUG nova.network.neutron [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Refreshing network info cache for port e06e56fe-7299-46f2-9238-9f1351c4ce06 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1133.434863] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updated VIF entry in instance network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1133.435241] env[65758]: DEBUG nova.network.neutron [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1133.468101] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af4007d-3df6-4e39-8147-a0e1b5ef6063 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.473934] env[65758]: DEBUG nova.network.neutron [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 
tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance_info_cache with network_info: [{"id": "67e62b92-0851-4648-b7d7-181b274c8325", "address": "fa:16:3e:23:1f:98", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e62b92-08", "ovs_interfaceid": "67e62b92-0851-4648-b7d7-181b274c8325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1133.478973] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12a5552-a6c9-4eac-9b14-4c5d00cf4f35 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.517069] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa643f8-cf0e-4c07-b1f0-fad3cacb4181 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.528687] env[65758]: DEBUG oslo_vmware.api [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661257, 'name': PowerOnVM_Task, 'duration_secs': 0.823964} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.529901] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30e72b8-bbfc-4231-a2c7-858ff9f2787e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.533722] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1133.533927] env[65758]: INFO nova.compute.manager [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Took 7.90 seconds to spawn the instance on the hypervisor. 
[ 1133.534125] env[65758]: DEBUG nova.compute.manager [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1133.535167] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d88c245-d0c9-472c-b9ad-9a2af6195a3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.550339] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.710102] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661258, 'name': CreateVM_Task, 'duration_secs': 0.378759} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.710301] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1133.710810] env[65758]: WARNING neutronclient.v2_0.client [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1133.711235] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.711389] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.711716] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1133.711972] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa4c3c44-746e-488e-a590-2a4f6d90928d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.717203] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1133.717203] env[65758]: value = 
"session[52f282ba-8d16-d852-9890-43f0b19795c3]52aa9953-ba64-ebce-d338-2d0f28dacfda" [ 1133.717203] env[65758]: _type = "Task" [ 1133.717203] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.726020] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52aa9953-ba64-ebce-d338-2d0f28dacfda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.900074] env[65758]: WARNING neutronclient.v2_0.client [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1133.900914] env[65758]: WARNING openstack [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1133.901375] env[65758]: WARNING openstack [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1133.941312] env[65758]: DEBUG oslo_concurrency.lockutils [req-dc1fe9c4-29a2-4ca5-9c3b-d2d6e14e3a06 req-f6c80bed-af88-4981-a57d-304f9037da8e service nova] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.980283] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.057508] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.069990] env[65758]: INFO nova.compute.manager [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Took 15.40 seconds to build instance. 
[ 1134.091072] env[65758]: WARNING neutronclient.v2_0.client [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1134.091237] env[65758]: WARNING openstack [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1134.091583] env[65758]: WARNING openstack [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1134.179869] env[65758]: DEBUG nova.network.neutron [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updated VIF entry in instance network info cache for port e06e56fe-7299-46f2-9238-9f1351c4ce06. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1134.180340] env[65758]: DEBUG nova.network.neutron [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance_info_cache with network_info: [{"id": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "address": "fa:16:3e:12:8a:30", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06e56fe-72", "ovs_interfaceid": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1134.230823] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52aa9953-ba64-ebce-d338-2d0f28dacfda, 'name': SearchDatastore_Task, 'duration_secs': 0.010319} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.231186] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.231431] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.231664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.231807] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.231983] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1134.232283] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ba31485-6508-423e-a1bb-8ba39e3dbcbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.242978] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1134.243274] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1134.244135] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b016a560-8a9c-44ec-8861-6b7d8566eeb9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.251962] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1134.251962] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5204939c-faab-63f2-9d95-61b06bab5524" [ 1134.251962] env[65758]: _type = "Task" [ 1134.251962] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.260543] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5204939c-faab-63f2-9d95-61b06bab5524, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.568402] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1134.568604] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.327s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.568803] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.568941] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Cleaning up deleted instances {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11916}} [ 1134.572103] env[65758]: DEBUG oslo_concurrency.lockutils [None req-dbf60c63-1698-4cf3-83ec-d8023d537fe1 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "a81095fb-6fe8-4b24-b763-1da983978460" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.906s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.685059] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Releasing lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.685403] env[65758]: DEBUG nova.compute.manager [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Received event 
network-changed-b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1134.685597] env[65758]: DEBUG nova.compute.manager [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Refreshing instance network info cache due to event network-changed-b91df992-11ae-4d37-af24-380860864b45. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1134.685806] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Acquiring lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.685944] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Acquired lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.686111] env[65758]: DEBUG nova.network.neutron [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Refreshing network info cache for port b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1134.765049] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5204939c-faab-63f2-9d95-61b06bab5524, 'name': SearchDatastore_Task, 'duration_secs': 0.010221} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.765910] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e234ce60-b523-4308-afa2-5e37785bd25b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.772346] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1134.772346] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52256a1b-2ac8-fe69-eb1a-69739c099d4b" [ 1134.772346] env[65758]: _type = "Task" [ 1134.772346] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.781471] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52256a1b-2ac8-fe69-eb1a-69739c099d4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.027234] env[65758]: INFO nova.compute.manager [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Rebuilding instance [ 1135.086154] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] There are 62 instances to clean {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11925}} [ 1135.086354] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 8f7c865d-1207-4300-b721-25b196f7a2f9] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1135.190190] env[65758]: WARNING neutronclient.v2_0.client [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1135.190848] env[65758]: WARNING openstack [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1135.191213] env[65758]: WARNING openstack [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1135.200318] env[65758]: DEBUG nova.compute.manager [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1135.201172] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96e6894-2c00-4e82-82e8-6553e61af7d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.283515] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52256a1b-2ac8-fe69-eb1a-69739c099d4b, 'name': SearchDatastore_Task, 'duration_secs': 0.010986} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.283800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.284092] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] b5bbff6b-42e9-4938-b4b3-05a9d5826d1c/b5bbff6b-42e9-4938-b4b3-05a9d5826d1c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1135.284383] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51926934-8fd5-4fb8-920d-7a0940727d69 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.292934] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1135.292934] env[65758]: value = "task-4661259" [ 1135.292934] env[65758]: _type = "Task" [ 1135.292934] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.302190] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.392736] env[65758]: WARNING neutronclient.v2_0.client [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1135.393551] env[65758]: WARNING openstack [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1135.393957] env[65758]: WARNING openstack [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1135.478834] env[65758]: DEBUG nova.network.neutron [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updated VIF entry in instance network info cache for port b91df992-11ae-4d37-af24-380860864b45. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1135.479222] env[65758]: DEBUG nova.network.neutron [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [{"id": "b91df992-11ae-4d37-af24-380860864b45", "address": "fa:16:3e:13:ee:a2", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb91df992-11", "ovs_interfaceid": "b91df992-11ae-4d37-af24-380860864b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1135.495022] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df3ef51-968b-4a12-9062-d0f15805da69 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.516425] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance '47cebd84-f9a1-4997-96aa-c76c5faa8c81' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1135.590714] env[65758]: DEBUG 
nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 6e334cc1-a6d7-4990-b65d-5e0e69a1e4dc] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1135.804505] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661259, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462084} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.804806] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] b5bbff6b-42e9-4938-b4b3-05a9d5826d1c/b5bbff6b-42e9-4938-b4b3-05a9d5826d1c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1135.804991] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1135.805279] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d27e72ab-92bb-4e4f-b97b-4e3248ba173e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.812917] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1135.812917] env[65758]: value = "task-4661260" [ 1135.812917] env[65758]: _type = "Task" [ 1135.812917] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.821165] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661260, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.982065] env[65758]: DEBUG oslo_concurrency.lockutils [req-c8386f68-3af2-4474-996c-1f026ed5f488 req-da9b378a-35e0-4cff-9c18-3d865fcb30b8 service nova] Releasing lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.022861] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.023465] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-093a7bd2-3dc1-4bc1-a9fb-5fea729acb81 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.041942] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1136.041942] env[65758]: value = "task-4661261" [ 1136.041942] env[65758]: _type = "Task" [ 1136.041942] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.057013] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661261, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.095125] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a014debf-2f16-4b30-af78-27a6751060de] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1136.215377] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.215881] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55d4cce5-c566-408a-920f-874bf7bd7426 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.223585] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1136.223585] env[65758]: value = "task-4661262" [ 1136.223585] env[65758]: _type = "Task" [ 1136.223585] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.233391] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661262, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.323799] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661260, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076886} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.324103] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1136.324999] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3363d06-63ef-4d4d-8ed5-cc4216fdeaab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.351216] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] b5bbff6b-42e9-4938-b4b3-05a9d5826d1c/b5bbff6b-42e9-4938-b4b3-05a9d5826d1c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.351604] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5ae8018-7266-43ef-ac89-1e83e9ed566e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.376194] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1136.376194] env[65758]: value = "task-4661263" [ 1136.376194] env[65758]: _type = "Task" [ 1136.376194] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.386105] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661263, 'name': ReconfigVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.553163] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661261, 'name': PowerOffVM_Task, 'duration_secs': 0.341914} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.553468] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.553649] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance '47cebd84-f9a1-4997-96aa-c76c5faa8c81' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.599065] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: ade1d760-e3e7-49c8-ba9d-b4829ca60841] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1136.737788] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661262, 'name': PowerOffVM_Task, 'duration_secs': 0.162288} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.738204] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.739148] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1136.740328] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2c3b05-3a9d-46b0-9383-e7312ff13d4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.749834] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1136.750200] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d654593-c4e5-4400-9c1e-6cc776443fcd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.776968] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1136.777225] env[65758]: 
DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1136.777403] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Deleting the datastore file [datastore1] a81095fb-6fe8-4b24-b763-1da983978460 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.777686] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae246cee-c619-4dfd-be5c-68d863e3e846 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.786062] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1136.786062] env[65758]: value = "task-4661265" [ 1136.786062] env[65758]: _type = "Task" [ 1136.786062] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.794608] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661265, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.890232] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661263, 'name': ReconfigVM_Task, 'duration_secs': 0.293934} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.890961] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Reconfigured VM instance instance-0000006a to attach disk [datastore2] b5bbff6b-42e9-4938-b4b3-05a9d5826d1c/b5bbff6b-42e9-4938-b4b3-05a9d5826d1c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.891203] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f687a748-c7c3-4d67-a414-146560a6be3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.899935] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1136.899935] env[65758]: value = "task-4661266" [ 1136.899935] env[65758]: _type = "Task" [ 1136.899935] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.910932] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661266, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.060524] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1137.060779] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1137.060928] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1137.061126] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1137.061269] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1137.061407] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1137.061608] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1137.061762] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 
tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1137.061919] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1137.062089] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1137.062261] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1137.067352] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b6bcd35-b63b-47c5-8520-438ce2e1f7c8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.086239] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1137.086239] env[65758]: value = "task-4661267" [ 1137.086239] env[65758]: _type = "Task" [ 1137.086239] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.097861] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661267, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.102567] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: b50b7e64-6f7f-4abc-a4b1-93408a723298] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1137.296876] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109438} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.297249] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.297322] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1137.297518] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1137.412058] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661266, 'name': Rename_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.598321] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661267, 'name': ReconfigVM_Task, 'duration_secs': 0.50849} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.598659] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance '47cebd84-f9a1-4997-96aa-c76c5faa8c81' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1137.606360] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 9ec1ff52-7fbd-4530-9377-caeff103360b] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1137.912739] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661266, 'name': Rename_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.105929] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1138.106157] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.106272] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1138.106456] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.106594] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1138.106735] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1138.106926] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1138.107091] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1138.107284] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible 
topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1138.107405] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1138.107566] env[65758]: DEBUG nova.virt.hardware [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1138.112756] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfiguring VM instance instance-00000063 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1138.113165] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: fe6f2a15-f42a-4f63-8dfa-175adadf5c02] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1138.114871] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18855e13-4bc3-41a7-95d2-07cc3790ef41 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.136345] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1138.136345] env[65758]: value = "task-4661268" [ 1138.136345] env[65758]: _type = "Task" [ 1138.136345] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.146141] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661268, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.335036] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1138.335378] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.335378] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1138.335547] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.335722] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1138.335894] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1138.336116] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1138.336276] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1138.336437] env[65758]: DEBUG 
nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1138.336633] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1138.336809] env[65758]: DEBUG nova.virt.hardware [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1138.338047] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6b319c-ca06-49ba-b9e0-ca0cbc3d68c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.347666] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fd8994-26f9-4e67-9742-4ba050a1cd09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.366896] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.373519] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1138.374113] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.374347] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60c588bb-06fd-47f1-9554-a9e81f1b14c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.392374] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.392374] env[65758]: value = "task-4661269" [ 1138.392374] env[65758]: _type = "Task" [ 1138.392374] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.402101] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661269, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.411852] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661266, 'name': Rename_Task, 'duration_secs': 1.148597} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.412229] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1138.412489] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-013ff47d-8852-4f86-a400-e73b8317f6d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.421015] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1138.421015] env[65758]: value = "task-4661270" [ 1138.421015] env[65758]: _type = "Task" [ 1138.421015] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.433419] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661270, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.630145] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 95509bbe-5aaf-471f-97b3-8a3085797568] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1138.647070] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661268, 'name': ReconfigVM_Task, 'duration_secs': 0.159035} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.647485] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfigured VM instance instance-00000063 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1138.648403] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7f7737-d372-4883-93ae-52af8db3a8db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.674435] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81/47cebd84-f9a1-4997-96aa-c76c5faa8c81.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.675879] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a623cd24-d24e-4f4a-964b-0fc3f700172e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.701801] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1138.701801] env[65758]: value = "task-4661271" [ 1138.701801] env[65758]: _type = "Task" [ 1138.701801] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.714804] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661271, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.841531] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.841813] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.905614] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661269, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.935024] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661270, 'name': PowerOnVM_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.134023] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 5fc4f1b8-9024-4155-b56d-56a8d08f0259] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1139.212069] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661271, 'name': ReconfigVM_Task, 'duration_secs': 0.323883} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.212366] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81/47cebd84-f9a1-4997-96aa-c76c5faa8c81.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1139.212598] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance '47cebd84-f9a1-4997-96aa-c76c5faa8c81' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1139.345146] env[65758]: INFO nova.compute.manager [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Detaching volume 584fc235-4162-403c-abe8-2188f52e0331 [ 1139.383207] env[65758]: INFO nova.virt.block_device [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Attempting to driver detach volume 584fc235-4162-403c-abe8-2188f52e0331 from mountpoint /dev/sdb [ 1139.383464] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1139.383677] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910042', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'name': 'volume-584fc235-4162-403c-abe8-2188f52e0331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'afc1eb16-c275-4b3b-a7fe-9938d2241e24', 'attached_at': '', 'detached_at': '', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'serial': '584fc235-4162-403c-abe8-2188f52e0331'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1139.385337] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9427c5ac-ceb4-461a-ad2d-2b585d6eff47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.412599] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef22354-6517-4765-98f8-f1bbe463bb75 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.420587] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661269, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.425386] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225152b4-0cb0-47d0-b0db-a0a08e8db751 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.449202] env[65758]: DEBUG oslo_vmware.api [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661270, 'name': PowerOnVM_Task, 'duration_secs': 0.574942} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.450017] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cc1c47-93f2-4b2a-8e76-b2331e71cd8e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.452604] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1139.452805] env[65758]: INFO nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Took 8.37 seconds to spawn the instance on the hypervisor. 
[ 1139.452979] env[65758]: DEBUG nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1139.454115] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9df2b85-a2ba-4394-8bee-f1851946d246 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.474207] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The volume has not been displaced from its original location: [datastore1] volume-584fc235-4162-403c-abe8-2188f52e0331/volume-584fc235-4162-403c-abe8-2188f52e0331.vmdk. No consolidation needed. {{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1139.479409] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1139.479964] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df4d7a40-4d4e-4e56-8a35-e6ff29a02c6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.499442] env[65758]: DEBUG oslo_vmware.api [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1139.499442] env[65758]: value = "task-4661272" [ 1139.499442] env[65758]: _type = "Task" [ 1139.499442] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.508932] env[65758]: DEBUG oslo_vmware.api [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661272, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.637370] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 37bae4b3-6959-4f44-8600-26a4f859103c] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1139.719476] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a695df7-00ab-4862-87ff-d92ad6da1738 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.741089] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b26418c-b468-4a31-ae9b-a3eb45a4739a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.760541] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance '47cebd84-f9a1-4997-96aa-c76c5faa8c81' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1139.919614] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661269, 'name': CreateVM_Task, 'duration_secs': 1.339487} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.919816] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1139.920228] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.920383] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.920717] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1139.920972] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac6fa65b-5bd2-4b82-bdd0-c0c7ded6cf26 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.926099] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ 
[ 1139.926099] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52b22680-1b65-1628-6388-0179a63e292f" [ 1139.926099] env[65758]: _type = "Task" [ 1139.926099] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.934606] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b22680-1b65-1628-6388-0179a63e292f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.988308] env[65758]: INFO nova.compute.manager [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Took 16.10 seconds to build instance. [ 1140.009412] env[65758]: DEBUG oslo_vmware.api [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661272, 'name': ReconfigVM_Task, 'duration_secs': 0.416728} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.010342] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1140.015381] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2d8032c-8231-44ae-a3d7-96287eafb6b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.033083] env[65758]: DEBUG oslo_vmware.api [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1140.033083] env[65758]: value = "task-4661273" [ 1140.033083] env[65758]: _type = "Task" [ 1140.033083] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.042304] env[65758]: DEBUG oslo_vmware.api [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661273, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.140768] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 0ce11868-fee2-40d3-9433-7bc398a1f756] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1140.267591] env[65758]: WARNING neutronclient.v2_0.client [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1140.267908] env[65758]: WARNING neutronclient.v2_0.client [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1140.313231] env[65758]: DEBUG nova.network.neutron [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Port 67e62b92-0851-4648-b7d7-181b274c8325 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 1140.437446] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52b22680-1b65-1628-6388-0179a63e292f, 'name': SearchDatastore_Task, 'duration_secs': 0.012437} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.437777] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.438023] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.438260] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.438404] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.438576] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.438844] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8696c78b-1d19-44f4-86e0-429553dc18f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.448679] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.448868] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1140.449632] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e96d7df0-a508-4ff6-a11f-d4a2878560a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.455547] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1140.455547] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e0bd94-92a5-4f1a-6a42-18eaa240b3d0" [ 1140.455547] env[65758]: _type = "Task" [ 1140.455547] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.464331] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e0bd94-92a5-4f1a-6a42-18eaa240b3d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.490044] env[65758]: DEBUG oslo_concurrency.lockutils [None req-86a3943e-70e0-4747-9d96-788fec653037 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.610s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.543758] env[65758]: DEBUG oslo_vmware.api [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661273, 'name': ReconfigVM_Task, 'duration_secs': 0.155288} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.544135] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910042', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'name': 'volume-584fc235-4162-403c-abe8-2188f52e0331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'afc1eb16-c275-4b3b-a7fe-9938d2241e24', 'attached_at': '', 'detached_at': '', 'volume_id': '584fc235-4162-403c-abe8-2188f52e0331', 'serial': '584fc235-4162-403c-abe8-2188f52e0331'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1140.645034] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 6981b99e-8e9f-459a-b356-9ed726c268ed] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1140.967936] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e0bd94-92a5-4f1a-6a42-18eaa240b3d0, 'name': SearchDatastore_Task, 'duration_secs': 0.010403} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.968708] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-149eabe9-db48-4326-9004-c97e0323127b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.974830] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1140.974830] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5207ecd7-3852-ae34-7c52-11314ece40e9" [ 1140.974830] env[65758]: _type = "Task" [ 1140.974830] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.983593] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5207ecd7-3852-ae34-7c52-11314ece40e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.096334] env[65758]: DEBUG nova.objects.instance [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'flavor' on Instance uuid afc1eb16-c275-4b3b-a7fe-9938d2241e24 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.148496] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a6ed7451-7b59-4ed9-8fb7-871d6107a272] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1141.335660] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.335871] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.336028] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.486460] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5207ecd7-3852-ae34-7c52-11314ece40e9, 'name': SearchDatastore_Task, 'duration_secs': 0.031047} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.487029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.487085] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a81095fb-6fe8-4b24-b763-1da983978460/a81095fb-6fe8-4b24-b763-1da983978460.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1141.487518] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6d1c48c-3a78-4185-a21e-db48fe129859 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.496041] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1141.496041] env[65758]: value = "task-4661274" [ 1141.496041] env[65758]: _type = "Task" [ 1141.496041] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.504399] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661274, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.651491] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: e41dffcd-f1ae-4c6c-9bfe-c14e6fb33f6a] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1141.795074] env[65758]: DEBUG nova.compute.manager [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1142.007716] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661274, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.104117] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fec98db5-cd49-4881-b1b5-4deef6797bc4 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.262s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.154536] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: fcb795c2-dd13-458a-a71e-1c9e4fdc5e06] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1142.315592] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.315931] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.342291] env[65758]: WARNING neutronclient.v2_0.client [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1142.384388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.384585] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.384765] env[65758]: DEBUG nova.network.neutron [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1142.508304] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661274, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561752} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.508594] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a81095fb-6fe8-4b24-b763-1da983978460/a81095fb-6fe8-4b24-b763-1da983978460.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1142.508838] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1142.509083] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9acddeb-3be4-4655-83ac-1bd7d312ba63 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.518660] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1142.518660] env[65758]: value = "task-4661275" [ 1142.518660] env[65758]: _type = "Task" [ 1142.518660] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.528516] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661275, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.657816] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 76ec31e6-65c2-4290-9ec0-b274be95baa4] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1142.821492] env[65758]: INFO nova.compute.claims [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1142.887342] env[65758]: WARNING neutronclient.v2_0.client [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1142.887896] env[65758]: WARNING openstack [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1142.888275] env[65758]: WARNING openstack [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1143.030836] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069158} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.031221] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1143.032741] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d5565b-be4d-41d0-a710-787427eca345 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.055837] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] a81095fb-6fe8-4b24-b763-1da983978460/a81095fb-6fe8-4b24-b763-1da983978460.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1143.056223] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80ad3019-804f-4818-9a01-e4a144596a46 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.073083] env[65758]: WARNING neutronclient.v2_0.client [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1143.073778] env[65758]: WARNING openstack [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1143.074148] env[65758]: WARNING openstack [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1143.083789] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1143.083789] env[65758]: value = "task-4661276" [ 1143.083789] env[65758]: _type = "Task" [ 1143.083789] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.096200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.096491] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.096710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.096937] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.097153] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.098805] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661276, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.099468] env[65758]: INFO nova.compute.manager [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Terminating instance [ 1143.162043] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 33098961-060f-4503-a805-6ae7351b45ea] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1143.169945] env[65758]: DEBUG nova.network.neutron [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance_info_cache with network_info: [{"id": "67e62b92-0851-4648-b7d7-181b274c8325", "address": "fa:16:3e:23:1f:98", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e62b92-08", "ovs_interfaceid": "67e62b92-0851-4648-b7d7-181b274c8325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1143.327887] env[65758]: INFO nova.compute.resource_tracker [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating resource usage from migration df917240-8170-4ba6-ad2c-6a6ffd6eb131 [ 1143.361456] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-62ae50af-ff52-4084-8161-1a650eff5247-8964bfa9-6690-403d-9936-940d8087617c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.361456] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-62ae50af-ff52-4084-8161-1a650eff5247-8964bfa9-6690-403d-9936-940d8087617c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.361711] env[65758]: DEBUG nova.objects.instance [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'flavor' on Instance uuid 62ae50af-ff52-4084-8161-1a650eff5247 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.518138] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e5a56a-56d6-492a-a444-dee05e823cfe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.526630] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3131d5-ad26-4fb8-bb70-1ec60ea3bca3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.558682] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f5f2f0-8248-4333-9104-99ed023d310b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.567792] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7ac28d-a252-44a8-9366-08a9720f5476 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.583130] env[65758]: DEBUG nova.compute.provider_tree [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.594244] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661276, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.603682] env[65758]: DEBUG nova.compute.manager [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1143.603898] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1143.605205] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97335790-9354-4678-8800-e710dbcf352b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.616041] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.616329] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c6ee9f9-8e9e-467b-87a9-b90a5811afa9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.623876] env[65758]: DEBUG oslo_vmware.api [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1143.623876] env[65758]: value = "task-4661277" [ 1143.623876] env[65758]: _type = "Task" [ 1143.623876] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.634367] env[65758]: DEBUG oslo_vmware.api [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.666803] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 63b744d2-541a-42e3-9717-b06a4459fd50] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1143.672747] env[65758]: DEBUG oslo_concurrency.lockutils [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.866270] env[65758]: WARNING neutronclient.v2_0.client [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1143.867217] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1143.867674] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1144.014189] env[65758]: DEBUG nova.objects.instance [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'pci_requests' on Instance uuid 62ae50af-ff52-4084-8161-1a650eff5247 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.090014] env[65758]: DEBUG nova.scheduler.client.report [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.099474] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661276, 'name': ReconfigVM_Task, 'duration_secs': 0.790469} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.099747] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Reconfigured VM instance instance-00000069 to attach disk [datastore2] a81095fb-6fe8-4b24-b763-1da983978460/a81095fb-6fe8-4b24-b763-1da983978460.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1144.100553] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7424c8ff-af90-4a79-9ed3-da3b67a5b01b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.109146] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1144.109146] env[65758]: value = "task-4661278" [ 1144.109146] env[65758]: _type = "Task" [ 1144.109146] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.118020] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661278, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.133335] env[65758]: DEBUG oslo_vmware.api [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661277, 'name': PowerOffVM_Task, 'duration_secs': 0.437107} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.133567] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1144.133715] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1144.133977] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99af2047-26aa-4c32-9d59-c832f4cb8951 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.171401] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: d5d27a5c-afe4-49a1-a385-0a8f625b5a1e] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1144.198414] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70020ff7-ac95-47f3-8c75-92e20fae22db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.203326] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1144.203547] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1144.203721] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleting the datastore file [datastore1] afc1eb16-c275-4b3b-a7fe-9938d2241e24 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.204415] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f571736a-7287-4fd6-88db-e4a3165fe9c7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.225100] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e30086-701d-42bb-b971-d19bce3e26f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.228358] env[65758]: DEBUG oslo_vmware.api [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] 
Waiting for the task: (returnval){ [ 1144.228358] env[65758]: value = "task-4661280" [ 1144.228358] env[65758]: _type = "Task" [ 1144.228358] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.236689] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance '47cebd84-f9a1-4997-96aa-c76c5faa8c81' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1144.244333] env[65758]: DEBUG oslo_vmware.api [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.516667] env[65758]: DEBUG nova.objects.base [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Object Instance<62ae50af-ff52-4084-8161-1a650eff5247> lazy-loaded attributes: flavor,pci_requests {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1144.517060] env[65758]: DEBUG nova.network.neutron [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1144.517486] env[65758]: WARNING neutronclient.v2_0.client [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1144.517938] env[65758]: WARNING neutronclient.v2_0.client [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1144.518450] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1144.518885] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1144.594154] env[65758]: DEBUG nova.policy [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1144.598167] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.281s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.598167] env[65758]: INFO nova.compute.manager [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Migrating [ 1144.624448] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661278, 'name': Rename_Task, 'duration_secs': 0.164801} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.625565] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1144.625847] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8cbdecac-7dfb-4d18-8f15-53fbf468fedb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.633934] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1144.633934] env[65758]: value = "task-4661281" [ 1144.633934] env[65758]: _type = "Task" [ 1144.633934] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.643346] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661281, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.676113] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 3ff9192b-3956-49f6-afd2-827759826056] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1144.743545] env[65758]: DEBUG oslo_vmware.api [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168144} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.743927] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.744232] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1144.744563] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1144.744848] env[65758]: INFO nova.compute.manager [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1144.745241] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1144.747544] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1144.747784] env[65758]: DEBUG nova.compute.manager [-] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1144.747884] env[65758]: DEBUG nova.network.neutron [-] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1144.748166] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1144.748702] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1144.750167] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1144.755700] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6447ac70-c9e3-4e62-b80e-a40fe2bb4bec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.763606] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1144.763606] env[65758]: value = "task-4661282" [ 1144.763606] env[65758]: _type = "Task" [ 1144.763606] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.773297] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661282, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.787603] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1145.114121] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.114293] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.114469] env[65758]: DEBUG nova.network.neutron [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1145.145901] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661281, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.161220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.161514] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.161770] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.162204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.162204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.165565] env[65758]: INFO nova.compute.manager [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Terminating instance [ 1145.179718] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 1ff48e58-9240-466d-bec4-51394e550c34] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1145.274554] env[65758]: DEBUG oslo_vmware.api [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661282, 'name': PowerOnVM_Task, 'duration_secs': 0.486158} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.275190] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1145.275385] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-73de0157-5637-4ef0-a3d7-c7e2988e09a6 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance '47cebd84-f9a1-4997-96aa-c76c5faa8c81' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1145.473142] env[65758]: DEBUG nova.compute.manager [req-0944bc2f-cc4e-4892-aa30-24031d65ab71 req-d93e1cb9-0ead-4cd9-b4ff-2a5847ff746f service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Received event network-vif-deleted-2adc4687-14f6-4742-8afd-a86473befd61 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1145.473328] env[65758]: INFO nova.compute.manager [req-0944bc2f-cc4e-4892-aa30-24031d65ab71 req-d93e1cb9-0ead-4cd9-b4ff-2a5847ff746f service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Neutron deleted interface 2adc4687-14f6-4742-8afd-a86473befd61; detaching it from the instance and deleting it from the info cache [ 1145.473497] env[65758]: DEBUG nova.network.neutron [req-0944bc2f-cc4e-4892-aa30-24031d65ab71 req-d93e1cb9-0ead-4cd9-b4ff-2a5847ff746f service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1145.617755] env[65758]: WARNING neutronclient.v2_0.client [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1145.618548] env[65758]: WARNING openstack [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1145.618859] env[65758]: WARNING openstack [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1145.649717] env[65758]: DEBUG oslo_vmware.api [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661281, 'name': PowerOnVM_Task, 'duration_secs': 0.786682} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.650111] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1145.650406] env[65758]: DEBUG nova.compute.manager [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1145.651524] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25c9ea4-666d-42d5-924b-1216778405f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.671423] env[65758]: DEBUG nova.compute.manager [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1145.671761] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1145.672101] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce5a1bba-d575-4bca-a933-eb5a888eec0c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.681241] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1145.681241] env[65758]: value = "task-4661283" [ 1145.681241] env[65758]: _type = "Task" [ 1145.681241] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.684587] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: aa2f1858-2bb2-4f12-bc05-ef6913ef36e2] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1145.692442] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661283, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.785812] env[65758]: WARNING neutronclient.v2_0.client [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1145.786800] env[65758]: WARNING openstack [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1145.787329] env[65758]: WARNING openstack [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1145.803724] env[65758]: DEBUG nova.network.neutron [-] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1145.902702] env[65758]: DEBUG nova.network.neutron [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance_info_cache with network_info: [{"id": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "address": "fa:16:3e:12:8a:30", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06e56fe-72", "ovs_interfaceid": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1145.979864] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-194700ef-47ae-4292-800a-07594f1abd95 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.990907] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af377e5-8e23-4565-aa0a-c6b16fe69c48 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.024569] env[65758]: DEBUG nova.compute.manager [req-0944bc2f-cc4e-4892-aa30-24031d65ab71 req-d93e1cb9-0ead-4cd9-b4ff-2a5847ff746f service nova] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Detach interface failed, port_id=2adc4687-14f6-4742-8afd-a86473befd61, reason: Instance afc1eb16-c275-4b3b-a7fe-9938d2241e24 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1146.062681] env[65758]: DEBUG nova.compute.manager [req-cd2bdbf4-2795-4f89-a983-50dc0d6dcba8 req-70d0ffe5-af91-4699-8c6e-448d9b2c8006 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-vif-plugged-8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1146.062847] env[65758]: DEBUG oslo_concurrency.lockutils [req-cd2bdbf4-2795-4f89-a983-50dc0d6dcba8 req-70d0ffe5-af91-4699-8c6e-448d9b2c8006 service nova] Acquiring lock "62ae50af-ff52-4084-8161-1a650eff5247-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.063133] env[65758]: DEBUG oslo_concurrency.lockutils [req-cd2bdbf4-2795-4f89-a983-50dc0d6dcba8 req-70d0ffe5-af91-4699-8c6e-448d9b2c8006 service nova] Lock "62ae50af-ff52-4084-8161-1a650eff5247-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.063316] env[65758]: DEBUG oslo_concurrency.lockutils [req-cd2bdbf4-2795-4f89-a983-50dc0d6dcba8 req-70d0ffe5-af91-4699-8c6e-448d9b2c8006 service nova] Lock "62ae50af-ff52-4084-8161-1a650eff5247-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.063487] env[65758]: DEBUG nova.compute.manager [req-cd2bdbf4-2795-4f89-a983-50dc0d6dcba8 req-70d0ffe5-af91-4699-8c6e-448d9b2c8006 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] No waiting events found dispatching network-vif-plugged-8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1146.063835] env[65758]: WARNING nova.compute.manager [req-cd2bdbf4-2795-4f89-a983-50dc0d6dcba8 req-70d0ffe5-af91-4699-8c6e-448d9b2c8006 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received unexpected event network-vif-plugged-8964bfa9-6690-403d-9936-940d8087617c for instance with vm_state active and task_state None. 
[ 1146.151285] env[65758]: DEBUG nova.network.neutron [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Successfully updated port: 8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1146.173441] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.174091] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.174091] env[65758]: DEBUG nova.objects.instance [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1146.191554] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: be3de9bd-da98-4c7e-ad7c-933245523695] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1146.193646] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661283, 'name': PowerOffVM_Task, 'duration_secs': 0.245403} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.194551] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1146.194793] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1146.194990] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910036', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'name': 'volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '149655f8-fcf5-4cfe-ab96-1171b9d3b550', 'attached_at': '2025-11-21T13:22:45.000000', 'detached_at': '', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'serial': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1146.195827] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdc6fb5-64b0-461d-aaa0-b9f63fbae317 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.218830] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a17fddd-ca0f-4b03-8a5f-0a4b8ef63ac0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.226903] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc11373c-7566-424a-9126-488dc9c35978 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.245683] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80256918-c4a3-49b4-9753-1a8530144472 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.262334] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] The volume has not been displaced from its original location: [datastore2] volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1/volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1.vmdk. No consolidation needed. 
{{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1146.267981] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1146.268364] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0736fca2-c1ce-4ce9-bfe2-6d6b8ecdfd35 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.287565] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1146.287565] env[65758]: value = "task-4661284" [ 1146.287565] env[65758]: _type = "Task" [ 1146.287565] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.297408] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661284, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.307829] env[65758]: INFO nova.compute.manager [-] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Took 1.56 seconds to deallocate network for instance. [ 1146.406397] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.654377] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.654779] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.654779] env[65758]: DEBUG nova.network.neutron [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1146.695041] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 12c27fac-98e9-486d-bf36-0580a4e0a163] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11929}} [ 1146.800232] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661284, 'name': ReconfigVM_Task, 'duration_secs': 0.242743} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.801309] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1146.806017] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8154b983-c4b3-44f5-aa36-311415f7a80e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.818913] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.827071] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1146.827071] env[65758]: value = "task-4661285" [ 1146.827071] env[65758]: _type = "Task" [ 1146.827071] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.840485] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661285, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.158400] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1147.158816] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1147.182544] env[65758]: DEBUG oslo_concurrency.lockutils [None req-27121e96-49c6-4973-9fc8-11f80052bb10 tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.183753] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.365s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.184302] env[65758]: DEBUG nova.objects.instance [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'resources' on Instance uuid afc1eb16-c275-4b3b-a7fe-9938d2241e24 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.197645] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 875cbc88-f817-4ea8-a969-b97e875918d1] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1147.203073] env[65758]: WARNING nova.network.neutron [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] 2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4 already exists in list: networks containing: ['2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4']. ignoring it [ 1147.332728] env[65758]: WARNING neutronclient.v2_0.client [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1147.333439] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1147.333831] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1147.348413] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661285, 'name': ReconfigVM_Task, 'duration_secs': 0.170726} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.348755] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910036', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'name': 'volume-eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '149655f8-fcf5-4cfe-ab96-1171b9d3b550', 'attached_at': '2025-11-21T13:22:45.000000', 'detached_at': '', 'volume_id': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1', 'serial': 'eef78269-5bc6-4fe3-9fa1-c9e27001a9e1'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1147.349093] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1147.349958] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061c724b-f6d0-4e74-9565-6cbaf827b06c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.358061] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1147.358353] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80689f57-f00f-4834-a7d0-08cfee0137be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.395931] env[65758]: WARNING 
neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1147.464539] env[65758]: WARNING neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1147.464954] env[65758]: WARNING neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1147.465281] env[65758]: WARNING neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1147.471064] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "a81095fb-6fe8-4b24-b763-1da983978460" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.471526] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "a81095fb-6fe8-4b24-b763-1da983978460" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.471788] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "a81095fb-6fe8-4b24-b763-1da983978460-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.471999] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "a81095fb-6fe8-4b24-b763-1da983978460-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.472203] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "a81095fb-6fe8-4b24-b763-1da983978460-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.474867] env[65758]: INFO nova.compute.manager [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Terminating instance [ 1147.504200] env[65758]: DEBUG nova.network.neutron [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Port 67e62b92-0851-4648-b7d7-181b274c8325 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 1147.504490] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.504824] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.504907] env[65758]: DEBUG nova.network.neutron [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1147.518036] env[65758]: WARNING neutronclient.v2_0.client [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1147.518811] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1147.519261] env[65758]: WARNING openstack [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1147.550753] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1147.551142] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1147.551464] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleting the datastore file [datastore2] 149655f8-fcf5-4cfe-ab96-1171b9d3b550 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1147.551912] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57c9a019-a87c-4a00-8a60-8a6d2f39691d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.561498] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1147.561498] env[65758]: value = "task-4661287" [ 1147.561498] env[65758]: _type = "Task" [ 1147.561498] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.571649] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661287, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.617683] env[65758]: DEBUG nova.network.neutron [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8964bfa9-6690-403d-9936-940d8087617c", "address": "fa:16:3e:f0:c3:e1", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8964bfa9-66", "ovs_interfaceid": "8964bfa9-6690-403d-9936-940d8087617c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1147.701532] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: ba3153f2-8e6f-469c-8730-957c5eebe97b] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1147.863478] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8117f66e-c464-4b04-afcc-b3ca28ce682f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.872266] env[65758]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46db69a0-314d-4d4c-a990-faa144fb6b2d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.912888] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d45fa7e-0a16-4b83-a403-0fb311ed30c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.924014] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd08a29d-57af-4600-a59b-f8feadd25791 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.928757] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc286645-65b8-49cc-88c0-96e5cd2b4b30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.949485] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance 'b5bbff6b-42e9-4938-b4b3-05a9d5826d1c' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1147.960862] env[65758]: DEBUG nova.compute.provider_tree [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.980698] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "refresh_cache-a81095fb-6fe8-4b24-b763-1da983978460" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.980896] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquired lock "refresh_cache-a81095fb-6fe8-4b24-b763-1da983978460" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.981121] env[65758]: DEBUG nova.network.neutron [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1148.007667] env[65758]: WARNING neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1148.008372] env[65758]: WARNING openstack [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1148.008727] env[65758]: WARNING openstack [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1148.073053] env[65758]: DEBUG oslo_vmware.api [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111002} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.073760] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1148.073963] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1148.074167] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1148.074369] env[65758]: INFO nova.compute.manager [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Took 2.40 seconds to destroy the instance on the hypervisor. [ 1148.074665] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1148.074897] env[65758]: DEBUG nova.compute.manager [-] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1148.074992] env[65758]: DEBUG nova.network.neutron [-] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1148.075277] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1148.075957] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1148.076260] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1148.095889] env[65758]: DEBUG nova.compute.manager [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-changed-8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1148.095994] env[65758]: DEBUG nova.compute.manager [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing instance network info cache due to event network-changed-8964bfa9-6690-403d-9936-940d8087617c. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1148.096126] env[65758]: DEBUG oslo_concurrency.lockutils [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.117739] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1148.119983] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.120696] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.120858] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.121454] env[65758]: DEBUG oslo_concurrency.lockutils [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.121653] env[65758]: DEBUG nova.network.neutron [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing network info cache for port 8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1148.123603] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e08801-9a51-4abd-9097-26427769e9ca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.144957] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1148.145210] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1148.145322] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:360}} [ 1148.145513] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1148.145684] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1148.145827] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1148.146480] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1148.146480] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1148.146480] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1148.146480] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1148.146708] env[65758]: DEBUG nova.virt.hardware [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1148.152971] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Reconfiguring VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1148.156527] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a518b6d-93ad-4e4a-bc92-cc0473412c69 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.177165] env[65758]: DEBUG oslo_vmware.api [None 
req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1148.177165] env[65758]: value = "task-4661288" [ 1148.177165] env[65758]: _type = "Task" [ 1148.177165] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.186471] env[65758]: DEBUG oslo_vmware.api [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661288, 'name': ReconfigVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.204643] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: e93528eb-33d0-46d1-94e8-d1d66f2c682f] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1148.263749] env[65758]: WARNING neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1148.264855] env[65758]: WARNING openstack [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1148.265285] env[65758]: WARNING openstack [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1148.389573] env[65758]: DEBUG nova.network.neutron [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance_info_cache with network_info: [{"id": "67e62b92-0851-4648-b7d7-181b274c8325", "address": "fa:16:3e:23:1f:98", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", 
"external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e62b92-08", "ovs_interfaceid": "67e62b92-0851-4648-b7d7-181b274c8325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1148.465507] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1148.466139] env[65758]: DEBUG nova.scheduler.client.report [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.469842] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22369a3c-c582-4df4-b829-3e5f8590ad1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.479283] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1148.479283] env[65758]: value = "task-4661289" [ 1148.479283] env[65758]: _type = "Task" [ 1148.479283] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.483519] env[65758]: WARNING neutronclient.v2_0.client [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1148.484698] env[65758]: WARNING openstack [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1148.485098] env[65758]: WARNING openstack [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1148.499191] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661289, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.513026] env[65758]: DEBUG nova.network.neutron [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1148.573570] env[65758]: DEBUG nova.network.neutron [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1148.628622] env[65758]: WARNING neutronclient.v2_0.client [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1148.629736] env[65758]: WARNING openstack [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1148.630359] env[65758]: WARNING openstack [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1148.692883] env[65758]: DEBUG oslo_vmware.api [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661288, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.708391] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: df46c28d-7cbd-490e-8db2-9730e4d9f953] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1148.803130] env[65758]: WARNING neutronclient.v2_0.client [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1148.803861] env[65758]: WARNING openstack [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1148.804259] env[65758]: WARNING openstack [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1148.893104] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.915641] env[65758]: DEBUG nova.network.neutron [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updated VIF entry in instance network info cache for port 8964bfa9-6690-403d-9936-940d8087617c. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1148.916131] env[65758]: DEBUG nova.network.neutron [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8964bfa9-6690-403d-9936-940d8087617c", "address": "fa:16:3e:f0:c3:e1", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8964bfa9-66", "ovs_interfaceid": "8964bfa9-6690-403d-9936-940d8087617c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1148.974728] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.990646] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661289, 'name': PowerOffVM_Task, 
'duration_secs': 0.329569} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.990945] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1148.991218] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance 'b5bbff6b-42e9-4938-b4b3-05a9d5826d1c' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1149.000204] env[65758]: INFO nova.scheduler.client.report [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted allocations for instance afc1eb16-c275-4b3b-a7fe-9938d2241e24 [ 1149.077068] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Releasing lock "refresh_cache-a81095fb-6fe8-4b24-b763-1da983978460" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.077493] env[65758]: DEBUG nova.compute.manager [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1149.077822] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1149.079232] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206a02e3-acf4-4313-9a08-717056b726e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.088974] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1149.089255] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f9b1322-4199-4076-a2f2-146e28880801 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.091436] env[65758]: DEBUG nova.network.neutron [-] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1149.097211] env[65758]: DEBUG oslo_vmware.api [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1149.097211] env[65758]: value = "task-4661290" [ 1149.097211] env[65758]: _type = "Task" [ 1149.097211] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.107189] env[65758]: DEBUG oslo_vmware.api [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661290, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.191041] env[65758]: DEBUG oslo_vmware.api [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661288, 'name': ReconfigVM_Task, 'duration_secs': 0.951441} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.192722] env[65758]: WARNING neutronclient.v2_0.client [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
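The WARNING repeated throughout this log says the python-neutronclient bindings are deprecated in favour of openstacksdk. A hedged sketch of the suggested replacement for the kind of port lookups happening here, assuming a clouds.yaml entry named "devstack" (the cloud name is a placeholder; the port and instance UUIDs are copied from the log entries above purely for illustration):

```python
# Minimal openstacksdk equivalent of the neutronclient port queries this log
# keeps warning about. The cloud name "devstack" is an assumed clouds.yaml
# entry, not something taken from this deployment.
import openstack

conn = openstack.connect(cloud='devstack')

# Fetch a single port by UUID, or iterate over the ports bound to an instance.
port = conn.network.get_port('8964bfa9-6690-403d-9936-940d8087617c')
for p in conn.network.ports(device_id='62ae50af-ff52-4084-8161-1a650eff5247'):
    print(p.id, p.fixed_ips)
```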
[ 1149.192722] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.192722] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Reconfigured VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1149.211374] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 79c63944-c4c8-4c7c-bc42-3f958d737e66] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1149.397545] env[65758]: DEBUG nova.compute.manager [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=65758) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:925}} [ 1149.397955] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.398318] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.418624] env[65758]: DEBUG oslo_concurrency.lockutils [req-77b81728-fba6-4545-b241-41ec31a1385a req-4f7044b5-6a75-4f7e-a9fb-e4693f583936 service nova] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.499400] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:571}} [ 1149.499608] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.499778] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1149.499976] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.500134] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1149.500276] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1149.500476] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1149.500629] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1149.500789] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1149.500940] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1149.501122] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1149.508454] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eaa971cb-710f-4c9b-beea-04a1bf69a346 {{(pid=65758) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.519656] env[65758]: DEBUG oslo_concurrency.lockutils [None req-69a1dd29-e815-4de3-856e-e88492f272fe tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "afc1eb16-c275-4b3b-a7fe-9938d2241e24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.423s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.529279] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1149.529279] env[65758]: value = "task-4661291" [ 1149.529279] env[65758]: _type = "Task" [ 1149.529279] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.544984] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661291, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.593630] env[65758]: INFO nova.compute.manager [-] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Took 1.52 seconds to deallocate network for instance. [ 1149.607364] env[65758]: DEBUG oslo_vmware.api [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661290, 'name': PowerOffVM_Task, 'duration_secs': 0.164601} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.607633] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1149.607802] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.608120] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-add945ce-293a-4887-9ebc-605d5c6ba41a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.638302] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.638540] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.638718] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Deleting the datastore file [datastore2] a81095fb-6fe8-4b24-b763-1da983978460 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.639032] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7c9300d-4c3b-49a6-8c92-978fd2849ebd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.645842] env[65758]: DEBUG oslo_vmware.api [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for the task: (returnval){ [ 1149.645842] env[65758]: value = "task-4661293" [ 1149.645842] env[65758]: _type = "Task" [ 1149.645842] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.654882] env[65758]: DEBUG oslo_vmware.api [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661293, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.697019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-777729c9-5f21-4b67-8ddc-d595b1da9187 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-62ae50af-ff52-4084-8161-1a650eff5247-8964bfa9-6690-403d-9936-940d8087617c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.336s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.714628] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a5b9e6c2-1bba-458e-88a5-9ca972fa4b0a] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1149.901814] env[65758]: DEBUG nova.objects.instance [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'migration_context' on Instance uuid 47cebd84-f9a1-4997-96aa-c76c5faa8c81 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.046649] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661291, 'name': ReconfigVM_Task, 'duration_secs': 0.244684} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.046968] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance 'b5bbff6b-42e9-4938-b4b3-05a9d5826d1c' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1150.131459] env[65758]: DEBUG nova.compute.manager [req-e49ee13d-580d-4e6d-8191-051e7f998212 req-1b3fc09f-341b-4cd0-a044-9e006d117007 service nova] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Received event network-vif-deleted-05e0fa46-1b67-477a-bc40-26c9641f6549 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1150.140662] env[65758]: INFO nova.compute.manager [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Took 0.55 seconds to detach 1 volumes for instance. [ 1150.142979] env[65758]: DEBUG nova.compute.manager [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Deleting volume: eef78269-5bc6-4fe3-9fa1-c9e27001a9e1 {{(pid=65758) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3320}} [ 1150.157449] env[65758]: DEBUG oslo_vmware.api [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Task: {'id': task-4661293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148748} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.157664] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1150.157747] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1150.159027] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1150.159027] env[65758]: INFO nova.compute.manager [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1150.159027] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1150.159027] env[65758]: DEBUG nova.compute.manager [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1150.159027] env[65758]: DEBUG nova.network.neutron [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1150.159027] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1150.159361] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1150.159788] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1150.184442] env[65758]: DEBUG nova.network.neutron [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1150.184442] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1150.217870] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 31816c0c-d7d2-48db-9a87-a1e03c938a60] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1150.553409] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1150.553670] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1150.553844] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1150.554020] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1150.554148] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1150.555345] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1150.555602] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1150.555754] env[65758]: DEBUG nova.virt.hardware [None 
req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1150.555920] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1150.556121] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1150.556304] env[65758]: DEBUG nova.virt.hardware [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1150.561713] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1150.564623] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35f749c4-0052-4136-814c-f368af2aad1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.589035] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1150.589035] env[65758]: value = "task-4661295" [ 1150.589035] env[65758]: _type = "Task" [ 1150.589035] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.594655] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfcf8099-76e2-46aa-bbc0-d420c818b794 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.600937] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661295, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.607348] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19c6276-a88b-48ac-909c-773932d234c5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.644085] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed85819-943b-41c6-8935-46658cbfeb88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.652917] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b40b9de-6b88-414f-baf0-2c9e439beda9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.667763] env[65758]: DEBUG nova.compute.provider_tree [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.687271] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.687819] env[65758]: DEBUG nova.network.neutron [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1150.721069] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 454bd092-f683-4a3a-91c9-65191d6996f4] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1151.090167] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-62ae50af-ff52-4084-8161-1a650eff5247-8964bfa9-6690-403d-9936-940d8087617c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.090441] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-62ae50af-ff52-4084-8161-1a650eff5247-8964bfa9-6690-403d-9936-940d8087617c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.104470] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661295, 'name': ReconfigVM_Task, 'duration_secs': 0.182131} completed 
successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.105710] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1151.106547] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf25f054-22e0-4177-8464-bc7c472c7a67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.131469] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] b5bbff6b-42e9-4938-b4b3-05a9d5826d1c/b5bbff6b-42e9-4938-b4b3-05a9d5826d1c.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.132367] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-968fd983-4f13-4df9-a976-ded7e24b15f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.153620] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1151.153620] env[65758]: value = "task-4661296" [ 1151.153620] env[65758]: _type = "Task" [ 1151.153620] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.162577] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661296, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.170728] env[65758]: DEBUG nova.scheduler.client.report [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.190914] env[65758]: INFO nova.compute.manager [-] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Took 1.03 seconds to deallocate network for instance. 
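The "Waiting for function ... _deallocate_network_with_retries to return" entries above are oslo.service's looping-call machinery wrapping a retried operation until it reports completion. A rough sketch of that general pattern, assuming a FixedIntervalLoopingCall and a stand-in work function rather than Nova's actual retry helper:

```python
# Rough sketch of the oslo.service looping-call wait pattern behind the
# "Waiting for function ... to return" lines above. The retry body is a
# stand-in, not Nova's _deallocate_network_with_retries.
from oslo_service import loopingcall

attempts = {'count': 0}

def do_deallocate():
    # Stand-in for the real work; succeeds immediately here.
    return True

def _deallocate_with_retries():
    attempts['count'] += 1
    ok = do_deallocate()
    if ok or attempts['count'] >= 3:
        # Signal the looping call to stop and hand back a return value.
        raise loopingcall.LoopingCallDone(retvalue=ok)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=1.0).wait()  # blocks until LoopingCallDone
```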
[ 1151.224949] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 3a7d0c08-9de6-47f4-a0c3-871458ccc4e3] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1151.597996] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.597996] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.599012] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28808e33-4c5e-4da2-a498-e21d30920783 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.617183] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27e283b-e471-4d4e-ae90-13e8926297ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.638471] env[65758]: WARNING neutronclient.v2_0.client [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1151.644040] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Reconfiguring VM to detach interface {{(pid=65758) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1151.644777] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8784ba44-c4c0-4f52-bde7-f80d810a6cd9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.666568] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661296, 'name': ReconfigVM_Task, 'duration_secs': 0.273755} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.667855] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Reconfigured VM instance instance-0000006a to attach disk [datastore2] b5bbff6b-42e9-4938-b4b3-05a9d5826d1c/b5bbff6b-42e9-4938-b4b3-05a9d5826d1c.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1151.668131] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance 'b5bbff6b-42e9-4938-b4b3-05a9d5826d1c' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1151.671483] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1151.671483] env[65758]: value = "task-4661297" [ 1151.671483] env[65758]: _type = "Task" [ 1151.671483] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.686538] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.697429] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.728282] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 7f5911fb-785e-444c-9408-c6884e06c5d3] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1151.815683] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.815962] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.178469] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83507c75-91ad-43e0-b99a-9ab87f876b1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.182070] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.784s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.190320] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.503s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.190646] env[65758]: DEBUG nova.objects.instance [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'resources' on Instance uuid 149655f8-fcf5-4cfe-ab96-1171b9d3b550 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.211880] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.215255] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd690459-4439-4a81-adbf-57b9cfd073ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.237312] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 96103549-80a5-462d-9f73-f5f6363ab9fc] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1152.239246] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance 'b5bbff6b-42e9-4938-b4b3-05a9d5826d1c' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1152.318662] env[65758]: DEBUG nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1152.683231] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.744469] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 85082b72-89dd-47b7-b8ad-f2ad5ad0638d] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1152.750996] env[65758]: WARNING neutronclient.v2_0.client [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1152.750996] env[65758]: WARNING neutronclient.v2_0.client [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1152.795215] env[65758]: DEBUG nova.network.neutron [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Port e06e56fe-7299-46f2-9238-9f1351c4ce06 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 1152.843015] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.918852] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b748b85b-63c5-4ae4-82fe-faca7d62a0fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.928118] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fec5454-6043-42e6-aeda-c5519a2c066a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.960069] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f6e102-47b6-4664-9a19-df6006e045c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.968599] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eadeef-8b6f-4a31-beea-9f81a8fa2fb4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.983155] env[65758]: DEBUG nova.compute.provider_tree [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.184991] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.243869] env[65758]: INFO nova.compute.manager [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Swapping old allocation on dict_keys(['0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51']) held by migration 5faf2f1a-3a2e-4e55-88e6-d1fd72448b69 for instance [ 1153.255111] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: d42d0818-1486-4696-9871-2cf989aeb885] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1153.270461] env[65758]: DEBUG nova.scheduler.client.report [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Overwriting current allocation {'allocations': {'0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 149}}, 'project_id': 'c4c2ab2b80c04c38bfb4c7cafac87fe6', 'user_id': 'b15f650508f844388197b63e6fee78a1', 'consumer_generation': 1} on consumer 47cebd84-f9a1-4997-96aa-c76c5faa8c81 {{(pid=65758) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1153.326489] env[65758]: WARNING neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1153.370577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.370799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.371057] env[65758]: DEBUG nova.network.neutron [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1153.486613] env[65758]: DEBUG nova.scheduler.client.report [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.684682] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.760547] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 4e4dd117-b4ea-43bb-a14f-55a5ab0e2d49] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1153.817429] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.817745] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.817921] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.873830] env[65758]: WARNING neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1153.874589] env[65758]: WARNING openstack [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1153.874951] env[65758]: WARNING openstack [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1153.994409] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.804s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.997109] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.300s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.997327] env[65758]: DEBUG nova.objects.instance [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lazy-loading 'resources' on Instance uuid a81095fb-6fe8-4b24-b763-1da983978460 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.013792] env[65758]: INFO nova.scheduler.client.report [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted allocations for instance 149655f8-fcf5-4cfe-ab96-1171b9d3b550 [ 1154.076970] env[65758]: WARNING neutronclient.v2_0.client [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1154.077765] env[65758]: WARNING openstack [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1154.078223] env[65758]: WARNING openstack [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1154.163828] env[65758]: DEBUG nova.network.neutron [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance_info_cache with network_info: [{"id": "67e62b92-0851-4648-b7d7-181b274c8325", "address": "fa:16:3e:23:1f:98", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e62b92-08", "ovs_interfaceid": "67e62b92-0851-4648-b7d7-181b274c8325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1154.184511] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.264961] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: b7323030-4573-4af5-a19a-212a140d642a] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1154.521422] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a08b5be6-4ab8-46bf-bbf4-67ca8852fc1e tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "149655f8-fcf5-4cfe-ab96-1171b9d3b550" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.360s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.632227] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5caa6f7f-a8ce-46b7-8de6-dc72097af013 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.640119] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bb3c4f-1f24-4e1b-83a9-87493f6ec6c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.669774] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-47cebd84-f9a1-4997-96aa-c76c5faa8c81" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.670253] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.670686] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83183d8f-4261-4d71-8fc6-e6aa957f2be1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.672742] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd9cdea-ccae-4377-9504-91418fc1c172 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.681987] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1154.681987] env[65758]: value = "task-4661298" [ 1154.681987] env[65758]: _type = "Task" [ 1154.681987] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.686870] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.690922] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d817649-5e93-459a-9aa5-7d7cd6af8e31 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.700309] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661298, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.708121] env[65758]: DEBUG nova.compute.provider_tree [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.768467] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: c1b9d81e-d747-4665-a083-26d8383f7645] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1154.824850] env[65758]: WARNING neutronclient.v2_0.client [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1154.857726] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.857922] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.858117] env[65758]: DEBUG nova.network.neutron [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1155.186764] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.194537] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661298, 'name': PowerOffVM_Task, 'duration_secs': 0.233324} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.194807] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1155.195477] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1155.195684] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1155.195832] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1155.196015] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1155.196164] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1155.196301] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1155.196496] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1155.196648] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 
tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1155.196799] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1155.196971] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1155.197162] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1155.201944] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1a3fb78-3a8e-4298-a40f-a9135d7e2aa4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.212376] env[65758]: DEBUG nova.scheduler.client.report [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.221539] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1155.221539] env[65758]: value = "task-4661299" [ 1155.221539] env[65758]: _type = "Task" [ 1155.221539] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.230698] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661299, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.272183] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 548edde0-9e42-4cd3-bdd3-3615ab9b7fc5] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1155.361737] env[65758]: WARNING neutronclient.v2_0.client [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1155.362424] env[65758]: WARNING openstack [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1155.362838] env[65758]: WARNING openstack [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1155.465775] env[65758]: DEBUG oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.466034] env[65758]: DEBUG oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.466254] env[65758]: DEBUG oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.466429] env[65758]: DEBUG oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.466605] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.469014] env[65758]: INFO nova.compute.manager [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Terminating instance [ 1155.520234] env[65758]: WARNING neutronclient.v2_0.client [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1155.521323] env[65758]: WARNING openstack [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1155.521745] env[65758]: WARNING openstack [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1155.600690] env[65758]: DEBUG nova.network.neutron [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance_info_cache with network_info: [{"id": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "address": "fa:16:3e:12:8a:30", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06e56fe-72", "ovs_interfaceid": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:121}} [ 1155.686176] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.717220] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.720s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.719600] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.877s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.721134] env[65758]: INFO nova.compute.claims [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1155.733802] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661299, 'name': ReconfigVM_Task, 'duration_secs': 0.142344} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.734850] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4231e584-d354-48bb-afa0-9a3f8f98fb99 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.754441] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1155.754731] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1155.754903] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1155.755094] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1155.755320] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1155.755467] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1155.755695] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1155.755853] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1155.756025] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1155.756214] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1155.756412] env[65758]: DEBUG nova.virt.hardware [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1155.758166] env[65758]: INFO nova.scheduler.client.report [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Deleted allocations for instance a81095fb-6fe8-4b24-b763-1da983978460 [ 1155.759346] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a4608bc-7269-4be3-9e4c-727dae3e0b7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.768392] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1155.768392] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ffc944-5dc4-a7b1-72e5-d80854c8bd4b" [ 1155.768392] env[65758]: _type = "Task" [ 1155.768392] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.777210] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: d60aaa5c-913f-4550-a4d5-ab994048da9f] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1155.778906] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ffc944-5dc4-a7b1-72e5-d80854c8bd4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.972702] env[65758]: DEBUG nova.compute.manager [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1155.972905] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1155.973826] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f32a3cd-1c31-4736-a556-afeec8676a24 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.982533] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.982787] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8157e611-0ff2-40c1-80b1-7af036b84aae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.989921] env[65758]: DEBUG oslo_vmware.api [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1155.989921] env[65758]: value = "task-4661300" [ 1155.989921] env[65758]: _type = "Task" [ 1155.989921] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.000471] env[65758]: DEBUG oslo_vmware.api [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661300, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.103204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.187145] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.268351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-2a04a4b1-f6e2-4a62-b339-54602ff136df tempest-ServersListShow2100Test-129593015 tempest-ServersListShow2100Test-129593015-project-member] Lock "a81095fb-6fe8-4b24-b763-1da983978460" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.797s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.280701] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 596a5005-3607-44a2-9c0e-f1a56865011c] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1156.282686] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ffc944-5dc4-a7b1-72e5-d80854c8bd4b, 'name': SearchDatastore_Task, 'duration_secs': 0.011692} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.288679] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfiguring VM instance instance-00000063 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1156.289636] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6aaeca68-1cc6-4cde-9325-2dd9d4191c51 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.310461] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1156.310461] env[65758]: value = "task-4661301" [ 1156.310461] env[65758]: _type = "Task" [ 1156.310461] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.320776] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661301, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.500369] env[65758]: DEBUG oslo_vmware.api [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661300, 'name': PowerOffVM_Task, 'duration_secs': 0.196918} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.500673] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.500841] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.501123] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af5a6dca-31a9-4fa0-8d75-2ba10fe9c025 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.570885] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.571157] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.571351] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleting the datastore file [datastore2] 31b7d1ee-58c1-47f3-a862-0bc5cb17addc {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.571637] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c83aa65-fff3-40f2-bb9b-bbb70bd031d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.579765] env[65758]: DEBUG oslo_vmware.api [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1156.579765] env[65758]: value = "task-4661303" [ 1156.579765] env[65758]: _type = "Task" [ 1156.579765] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.589833] env[65758]: DEBUG oslo_vmware.api [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661303, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.632676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ab52f1-1c79-4e22-90b9-dfc9d9df1843 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.654696] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60de0440-8bd3-4025-9326-f0c55801d0e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.663139] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance 'b5bbff6b-42e9-4938-b4b3-05a9d5826d1c' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1156.688412] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.783927] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 7c0e6911-4f85-4b47-a7e9-84d0e3bb5720] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1156.824220] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661301, 'name': ReconfigVM_Task, 'duration_secs': 0.164551} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.824516] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfigured VM instance instance-00000063 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1156.825368] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f36761c-e058-4874-a1e1-582ae5890b83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.856198] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81/47cebd84-f9a1-4997-96aa-c76c5faa8c81.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.859925] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6924e4fe-4c60-4edc-81c5-893c3c03fc51 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.881564] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1156.881564] env[65758]: value = "task-4661304" [ 1156.881564] env[65758]: _type = "Task" [ 1156.881564] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.894112] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661304, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.909442] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04fbf96-00a0-41af-88a6-57115e75e401 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.919266] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063336ca-81ba-407f-842d-33e119343fab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.952940] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dcf677-93e5-44be-bfd3-6f18198ad48f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.962466] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd7ee0a-7aad-4b57-8d70-64bb767d13e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.977226] env[65758]: DEBUG nova.compute.provider_tree [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.091701] env[65758]: DEBUG oslo_vmware.api [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661303, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143002} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.091985] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.092195] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.092366] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.092548] env[65758]: INFO nova.compute.manager [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Took 1.12 seconds to destroy the instance on the hypervisor. 
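The records above trace the VMware driver destroying instance 31b7d1ee-58c1-47f3-a862-0bc5cb17addc in three steps, each confirmed before the next: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task on the instance's datastore directory. The sketch below only mirrors that ordering; the `session` object and its helper methods are hypothetical stand-ins for illustration, not Nova's vmops/vm_util code.

```python
# Minimal sketch of the destroy ordering logged above (power off, unregister,
# delete the datastore directory). Every name on `session` is a hypothetical
# stand-in; this is not Nova's actual implementation.

def destroy_instance(session, instance_uuid, datastore_path):
    vm_ref = session.find_vm_by_uuid(instance_uuid)   # hypothetical lookup
    if vm_ref is None:
        return

    # 1. Power off the VM and wait for the vCenter task to finish
    #    (PowerOffVM_Task above, ~0.2s in this run).
    session.wait_for_task(session.invoke("PowerOffVM_Task", vm_ref))

    # 2. Unregister the VM from the vCenter inventory (no task returned).
    session.invoke("UnregisterVM", vm_ref)

    # 3. Delete the instance's files, e.g.
    #    "[datastore2] 31b7d1ee-58c1-47f3-a862-0bc5cb17addc".
    session.wait_for_task(
        session.invoke("DeleteDatastoreFile_Task", datastore_path))
```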
[ 1157.092814] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1157.093033] env[65758]: DEBUG nova.compute.manager [-] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1157.093119] env[65758]: DEBUG nova.network.neutron [-] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1157.093378] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1157.094134] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1157.094254] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1157.148046] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1157.169853] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.170218] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d075864e-7f72-4583-98e2-56507aeb29da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.177521] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1157.177521] env[65758]: value = "task-4661305" [ 1157.177521] env[65758]: _type = "Task" [ 1157.177521] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.190306] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661305, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.193651] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task} progress is 18%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.287921] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: b7e2a3d9-7db3-40b3-98a5-c6e6e040a947] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1157.400972] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661304, 'name': ReconfigVM_Task, 'duration_secs': 0.325986} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.401476] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81/47cebd84-f9a1-4997-96aa-c76c5faa8c81.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.402841] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e3a58d-5320-4993-8f39-c77485dcfe91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.425897] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3949e5bc-0291-4abd-bcdf-276aa86a76f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.429554] env[65758]: DEBUG nova.compute.manager [req-5db98be4-ddcc-4322-a9fa-9b741b2e460c req-bf9b7adc-38f5-4b91-8145-4884a17a3e3d service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Received event network-vif-deleted-f30ab0a1-5ab3-4e16-a881-f850a8fd4399 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1157.429750] env[65758]: INFO nova.compute.manager [req-5db98be4-ddcc-4322-a9fa-9b741b2e460c req-bf9b7adc-38f5-4b91-8145-4884a17a3e3d service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Neutron deleted interface f30ab0a1-5ab3-4e16-a881-f850a8fd4399; detaching it from the instance and deleting it from the info cache [ 1157.432144] env[65758]: DEBUG nova.network.neutron [req-5db98be4-ddcc-4322-a9fa-9b741b2e460c req-bf9b7adc-38f5-4b91-8145-4884a17a3e3d service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1157.451955] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955f1e03-fbc2-42c8-adec-fb48386eb7f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.482057] env[65758]: 
DEBUG nova.scheduler.client.report [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.486132] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f65b761-d2e9-4596-ae3c-461ba9ff9adc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.495207] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.495523] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4bb87bb-315a-43a6-8f9a-250ce01addc4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.505176] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1157.505176] env[65758]: value = "task-4661306" [ 1157.505176] env[65758]: _type = "Task" [ 1157.505176] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.514960] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661306, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.690223] env[65758]: DEBUG oslo_vmware.api [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661297, 'name': ReconfigVM_Task, 'duration_secs': 5.805457} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.693170] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.693385] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Reconfigured VM to detach interface {{(pid=65758) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1157.693839] env[65758]: WARNING neutronclient.v2_0.client [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1157.694141] env[65758]: WARNING neutronclient.v2_0.client [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1157.694774] env[65758]: WARNING openstack [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1157.695193] env[65758]: WARNING openstack [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1157.701747] env[65758]: DEBUG oslo_vmware.api [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661305, 'name': PowerOnVM_Task, 'duration_secs': 0.449555} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.702209] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1157.702387] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5f0c0d-b14d-467e-83ac-9c4d0e80b699 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance 'b5bbff6b-42e9-4938-b4b3-05a9d5826d1c' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1157.731312] env[65758]: WARNING neutronclient.v2_0.client [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1157.791907] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a9ec9a64-94c7-41a5-a7a4-5e034ddfc592] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1157.895407] env[65758]: DEBUG nova.network.neutron [-] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1157.933535] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5ce43b1-3db1-4fcf-8854-d65edee0af9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.946028] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4c7c6f-75bc-458f-92ae-1607d59c19a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.977588] env[65758]: DEBUG nova.compute.manager [req-5db98be4-ddcc-4322-a9fa-9b741b2e460c req-bf9b7adc-38f5-4b91-8145-4884a17a3e3d service nova] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Detach interface failed, port_id=f30ab0a1-5ab3-4e16-a881-f850a8fd4399, reason: Instance 31b7d1ee-58c1-47f3-a862-0bc5cb17addc could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1157.990223] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.990780] env[65758]: DEBUG nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1158.016587] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661306, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.295429] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 81f961c3-ec8f-4281-be18-5d605fa73ecc] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1158.397559] env[65758]: INFO nova.compute.manager [-] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Took 1.30 seconds to deallocate network for instance. [ 1158.496186] env[65758]: DEBUG nova.compute.utils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1158.499564] env[65758]: DEBUG nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1158.499564] env[65758]: DEBUG nova.network.neutron [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1158.499564] env[65758]: WARNING neutronclient.v2_0.client [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1158.499564] env[65758]: WARNING neutronclient.v2_0.client [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
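The "Acquiring lock ... / Lock ... acquired ... waited / released ... held" records that recur through this section come from oslo.concurrency's lockutils helpers, which time how long each caller waited for and then held a named lock. A minimal usage sketch follows; the lock names match ones seen in the log, but the functions and bodies are illustrative only.

```python
from oslo_concurrency import lockutils

# Decorator form: serializes callers on the named lock; at DEBUG level it
# emits the "acquired ... waited" / "released ... held" lines seen above
# for locks such as "compute_resources".
@lockutils.synchronized("compute_resources")
def update_usage():
    pass  # illustrative body


# Context-manager form, as used for per-instance locks like
# "refresh_cache-<instance uuid>".
def refresh_cache(instance_uuid):
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # illustrative body
```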
[ 1158.500617] env[65758]: WARNING openstack [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1158.500617] env[65758]: WARNING openstack [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1158.525354] env[65758]: DEBUG oslo_vmware.api [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661306, 'name': PowerOnVM_Task, 'duration_secs': 0.72528} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.525915] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.802984] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 56ff4122-a999-4caf-b805-0754a66d6bc7] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1158.905136] env[65758]: DEBUG oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.905136] env[65758]: DEBUG oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.905136] env[65758]: DEBUG nova.objects.instance [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'resources' on Instance uuid 31b7d1ee-58c1-47f3-a862-0bc5cb17addc {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.999465] env[65758]: DEBUG nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1159.090154] env[65758]: DEBUG nova.policy [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6b7220ea9a34475879748959534988d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2440f1694fe4b87a9827f6653ff2e4c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1159.092748] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.092988] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.093188] env[65758]: DEBUG nova.network.neutron [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1159.305751] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 105c53ce-e657-4a29-bc7f-96b4f885707a] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1159.519630] env[65758]: DEBUG nova.network.neutron [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Successfully created port: f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1159.546938] env[65758]: INFO nova.compute.manager [None req-f1a24305-d6bb-4912-bede-99c8bc9a8d18 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance to original state: 'active' [ 1159.600544] env[65758]: WARNING neutronclient.v2_0.client [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1159.601526] env[65758]: WARNING openstack [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1159.602078] env[65758]: WARNING openstack [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1159.628808] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5399bdc8-9522-42d7-ae0a-692e9999614d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.638798] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fc31f7-89ef-4205-9135-d30d35c7e73a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.684395] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5095cdf-582a-49c7-a024-bf878b2f173f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.687866] env[65758]: DEBUG nova.compute.manager [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1159.688071] env[65758]: DEBUG nova.compute.manager [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing instance network info cache due to event network-changed-4b156aab-9aa2-46c6-8e9f-b9912654dcc0. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1159.688310] env[65758]: DEBUG oslo_concurrency.lockutils [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] Acquiring lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.695452] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2a8c68-2e07-47ff-a218-5b0d5d24d14d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.711043] env[65758]: DEBUG nova.compute.provider_tree [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.809255] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 5e54e7f4-3df1-4283-bee1-a7e475051a24] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1159.951320] env[65758]: WARNING neutronclient.v2_0.client [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1159.952232] env[65758]: WARNING openstack [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1159.952767] env[65758]: WARNING openstack [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1160.018350] env[65758]: DEBUG nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1160.063898] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1160.064204] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1160.064367] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1160.064800] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1160.065033] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1160.065195] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1160.065850] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1160.065850] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:479}} [ 1160.065850] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1160.066062] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1160.066117] env[65758]: DEBUG nova.virt.hardware [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1160.067061] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b02164b-da72-441e-9daa-8d51849a243e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.080633] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ec0012-994e-438d-a02c-0ea9d19625b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.100260] env[65758]: INFO nova.network.neutron [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Port 8964bfa9-6690-403d-9936-940d8087617c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
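The nova.virt.hardware records above show the driver enumerating possible CPU topologies for the m1.nano flavor (1 vCPU, no flavor or image limits) and settling on cores=1, sockets=1, threads=1. A simplified sketch of that enumeration, not Nova's actual _get_possible_cpu_topologies, is shown below.

```python
# Simplified sketch: find (sockets, cores, threads) triples whose product
# equals the vCPU count and which respect the per-dimension maxima. With
# vcpus=1 the only candidate is 1:1:1, matching the log above.
import itertools
from typing import List, Tuple

def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> List[Tuple[int, int, int]]:
    topologies = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies

# "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies"
assert possible_topologies(1) == [(1, 1, 1)]
```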
[ 1160.100627] env[65758]: DEBUG nova.network.neutron [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1160.216625] env[65758]: DEBUG nova.scheduler.client.report [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.251248] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.251454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.251639] env[65758]: DEBUG nova.compute.manager [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Going to confirm migration 9 {{(pid=65758) do_confirm_resize 
/opt/stack/nova/nova/compute/manager.py:5290}} [ 1160.313246] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 47bb5b02-4f84-468e-ad46-2c1c96b65c97] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1160.574639] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-8d0419d1-c301-4302-80c1-cd0fce7ccba4-8964bfa9-6690-403d-9936-940d8087617c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.575009] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-8d0419d1-c301-4302-80c1-cd0fce7ccba4-8964bfa9-6690-403d-9936-940d8087617c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.575236] env[65758]: DEBUG nova.objects.instance [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'flavor' on Instance uuid 8d0419d1-c301-4302-80c1-cd0fce7ccba4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.595768] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.596028] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.603579] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.606338] env[65758]: DEBUG oslo_concurrency.lockutils [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] Acquired lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.606536] env[65758]: DEBUG nova.network.neutron [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Refreshing network info 
cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1160.721473] env[65758]: DEBUG oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.817s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.742593] env[65758]: INFO nova.scheduler.client.report [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted allocations for instance 31b7d1ee-58c1-47f3-a862-0bc5cb17addc [ 1160.759270] env[65758]: WARNING neutronclient.v2_0.client [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1160.796533] env[65758]: WARNING neutronclient.v2_0.client [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1160.796888] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.797099] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquired lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.797279] env[65758]: DEBUG nova.network.neutron [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1160.797459] env[65758]: DEBUG nova.objects.instance [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'info_cache' on Instance uuid b5bbff6b-42e9-4938-b4b3-05a9d5826d1c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.816521] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a662eac8-07e2-47f1-a4dd-9abbe824817d] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1161.081109] env[65758]: WARNING neutronclient.v2_0.client [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in 
neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1161.081803] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.082179] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1161.098994] env[65758]: DEBUG nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1161.111095] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4e534b97-d588-4833-bc8f-d2a7f53288af tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-62ae50af-ff52-4084-8161-1a650eff5247-8964bfa9-6690-403d-9936-940d8087617c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.020s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.116022] env[65758]: WARNING neutronclient.v2_0.client [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1161.116022] env[65758]: WARNING openstack [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.116022] env[65758]: WARNING openstack [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1161.126417] env[65758]: DEBUG nova.network.neutron [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Successfully updated port: f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1161.214948] env[65758]: DEBUG nova.objects.instance [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'pci_requests' on Instance uuid 8d0419d1-c301-4302-80c1-cd0fce7ccba4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.254437] env[65758]: DEBUG oslo_concurrency.lockutils [None req-627fcd70-231d-43d6-ad23-ae7e8377b6b5 tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "31b7d1ee-58c1-47f3-a862-0bc5cb17addc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.788s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.289567] env[65758]: WARNING neutronclient.v2_0.client [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1161.290720] env[65758]: WARNING openstack [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.291342] env[65758]: WARNING openstack [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1161.319655] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: f15c6953-f76b-44eb-bd1b-c0d3adddc163] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1161.394287] env[65758]: DEBUG nova.network.neutron [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updated VIF entry in instance network info cache for port 4b156aab-9aa2-46c6-8e9f-b9912654dcc0. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1161.394859] env[65758]: DEBUG nova.network.neutron [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [{"id": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "address": "fa:16:3e:07:39:5d", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b156aab-9a", "ovs_interfaceid": "4b156aab-9aa2-46c6-8e9f-b9912654dcc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1161.623861] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.624159] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 
tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.625849] env[65758]: INFO nova.compute.claims [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1161.630019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.630183] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.631028] env[65758]: DEBUG nova.network.neutron [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1161.718280] env[65758]: DEBUG nova.objects.base [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Object Instance<8d0419d1-c301-4302-80c1-cd0fce7ccba4> lazy-loaded attributes: flavor,pci_requests {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1161.718526] env[65758]: DEBUG nova.network.neutron [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1161.718872] env[65758]: WARNING neutronclient.v2_0.client [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1161.719215] env[65758]: WARNING neutronclient.v2_0.client [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1161.719851] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.720242] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1161.796946] env[65758]: DEBUG nova.policy [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '247c8989cf1942b3b068da657f006453', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64ffccae76ed401582dd915ae5f87922', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1161.811404] env[65758]: WARNING neutronclient.v2_0.client [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1161.812161] env[65758]: WARNING openstack [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.812521] env[65758]: WARNING openstack [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1161.824144] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: cca3e019-8e82-4473-8609-291703762a6e] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1161.898301] env[65758]: DEBUG oslo_concurrency.lockutils [req-ec3e3e4a-c427-4966-a7f9-b52b82a90e77 req-47f7f38a-e27d-4d47-b928-6a74711e8a5a service nova] Releasing lock "refresh_cache-62ae50af-ff52-4084-8161-1a650eff5247" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.967434] env[65758]: WARNING neutronclient.v2_0.client [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1161.968503] env[65758]: WARNING openstack [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1161.968884] env[65758]: WARNING openstack [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1162.053067] env[65758]: DEBUG nova.network.neutron [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance_info_cache with network_info: [{"id": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "address": "fa:16:3e:12:8a:30", "network": {"id": "4b60babe-f911-44aa-81a1-a04fcda902dc", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-378345345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bad3e3c7054c424a800cb12e9c5dbb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06e56fe-72", "ovs_interfaceid": "e06e56fe-7299-46f2-9238-9f1351c4ce06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1162.133022] env[65758]: WARNING openstack [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1162.133514] env[65758]: WARNING openstack [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1162.170157] env[65758]: DEBUG nova.network.neutron [None 
req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1162.242186] env[65758]: WARNING neutronclient.v2_0.client [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1162.242740] env[65758]: WARNING openstack [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1162.243198] env[65758]: WARNING openstack [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1162.326411] env[65758]: DEBUG nova.network.neutron [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [{"id": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "address": "fa:16:3e:9d:65:b5", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dcfa87-c0", "ovs_interfaceid": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1162.327990] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: e6159a35-f073-4931-b0b0-832a88680356] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1162.556217] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Releasing lock "refresh_cache-b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.556492] env[65758]: DEBUG nova.objects.instance [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lazy-loading 'migration_context' on Instance uuid b5bbff6b-42e9-4938-b4b3-05a9d5826d1c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.781335] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65dbbee2-72c7-4fe7-b19f-633a17e0cb04 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.790415] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1acf7b1-d80f-4622-9bd6-465c34c91739 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.821551] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9543bf-c1ec-4a92-8057-faaaf7f931f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.830144] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fab93a-cd77-4b41-888c-dfb48355ad75 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.834682] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.835053] env[65758]: DEBUG nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Instance network_info: |[{"id": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "address": "fa:16:3e:9d:65:b5", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dcfa87-c0", "ovs_interfaceid": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1162.835495] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 974d06c1-2704-4a78-bbd7-f54335c4288e] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1162.837390] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:65:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3dcfa87-c097-4b94-bab6-e9fd7455605b', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1162.844861] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1162.845826] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1162.846086] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43e298d3-59a3-4acb-8fee-ee4bbcc0bc84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.870638] env[65758]: DEBUG nova.compute.provider_tree [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.878269] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1162.878269] env[65758]: value = "task-4661307" [ 1162.878269] env[65758]: _type = "Task" [ 1162.878269] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.887769] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661307, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.059530] env[65758]: DEBUG nova.objects.base [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1163.060621] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5614789f-a8f0-40a9-a346-98ba2fbe48ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.081481] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d874bf30-d033-4304-b4d3-a677f6533f90 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.088865] env[65758]: DEBUG oslo_vmware.api [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1163.088865] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52737786-9057-b1c0-b624-9eea053a1774" [ 1163.088865] env[65758]: _type = "Task" [ 1163.088865] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.098756] env[65758]: DEBUG oslo_vmware.api [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52737786-9057-b1c0-b624-9eea053a1774, 'name': SearchDatastore_Task, 'duration_secs': 0.006826} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.099058] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.321086] env[65758]: DEBUG nova.network.neutron [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Successfully updated port: 8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1163.347160] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: ec1e2845-e73a-40ff-9b6c-1d8281859fba] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1163.374094] env[65758]: DEBUG nova.scheduler.client.report [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.389841] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661307, 'name': CreateVM_Task, 'duration_secs': 0.313359} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.390852] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1163.391402] env[65758]: WARNING neutronclient.v2_0.client [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1163.391784] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.391975] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.392313] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1163.392837] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8212e1bd-d89f-4852-8a05-d9c138d819e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.398075] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1163.398075] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d2949c-77cf-6529-f3af-45b03f76dc82" [ 1163.398075] env[65758]: _type = "Task" [ 1163.398075] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.407210] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d2949c-77cf-6529-f3af-45b03f76dc82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.825131] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.825471] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.825771] env[65758]: DEBUG nova.network.neutron [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1163.850783] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 9e007d55-0a5c-4469-a546-9b18e188bea0] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1163.878826] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.879386] env[65758]: DEBUG nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1163.881787] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.783s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.910983] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d2949c-77cf-6529-f3af-45b03f76dc82, 'name': SearchDatastore_Task, 'duration_secs': 0.010401} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.911333] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.911564] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1163.911793] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.911932] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.912121] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1163.912704] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-709fd06b-d28d-4fb0-a7d2-29bfa2bd94f2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.922901] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1163.923138] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1163.923952] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b9049f6-3b0e-431e-95c2-a501f5ddfab4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.932022] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1163.932022] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524a2914-6ea1-5d1f-d9ed-cfca8cef9201" [ 1163.932022] env[65758]: _type = "Task" [ 1163.932022] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.943171] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a2914-6ea1-5d1f-d9ed-cfca8cef9201, 'name': SearchDatastore_Task, 'duration_secs': 0.010324} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.944398] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69c39b95-2f2c-4497-a4ea-b381103e30f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.951370] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1163.951370] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52090bdb-c379-ce48-acf7-798f0aa1b46a" [ 1163.951370] env[65758]: _type = "Task" [ 1163.951370] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.960837] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52090bdb-c379-ce48-acf7-798f0aa1b46a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.329805] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1164.330268] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1164.355192] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 1e249ca9-a7a8-440f-832b-a8f5d84ada8b] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1164.379443] env[65758]: WARNING nova.network.neutron [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] 2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4 already exists in list: networks containing: ['2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4']. ignoring it [ 1164.388275] env[65758]: DEBUG nova.compute.utils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1164.390104] env[65758]: DEBUG nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1164.390474] env[65758]: DEBUG nova.network.neutron [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1164.390609] env[65758]: WARNING neutronclient.v2_0.client [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1164.390899] env[65758]: WARNING neutronclient.v2_0.client [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1164.391494] env[65758]: WARNING openstack [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1164.391851] env[65758]: WARNING openstack [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1164.450024] env[65758]: DEBUG nova.policy [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc85d2d1d84f4df0b4de5e6388bb9398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82bfbb5ee6714c9aa5119cb714d28ce2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.461742] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52090bdb-c379-ce48-acf7-798f0aa1b46a, 'name': SearchDatastore_Task, 'duration_secs': 0.01005} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.462061] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.462356] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] bc10286b-195f-48a2-b16c-f8f925ec7a2a/bc10286b-195f-48a2-b16c-f8f925ec7a2a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1164.462660] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d647f80e-8ca0-4bf8-8887-807461f80f0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.471291] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1164.471291] env[65758]: value = "task-4661308" [ 1164.471291] env[65758]: _type = "Task" [ 1164.471291] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.472710] env[65758]: WARNING neutronclient.v2_0.client [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1164.474953] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1164.474953] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1164.495086] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661308, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.524110] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee760aee-8f2b-426e-a554-1f7fa71b6b26 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.536652] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57a1c8b-e0e7-4644-9dc5-16c7cf00a5af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.576448] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a2dd75-1735-4b1c-b544-f47fdf31e94a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.587822] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02640114-b7a6-4d91-8596-b970e39eea53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.603830] env[65758]: DEBUG nova.compute.provider_tree [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.678088] env[65758]: WARNING neutronclient.v2_0.client [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1164.678829] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1164.679195] env[65758]: WARNING openstack [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1164.757607] env[65758]: DEBUG nova.network.neutron [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Successfully created port: 9084cee5-02d7-477c-8464-d70e0bfd1ef8 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1164.782725] env[65758]: DEBUG nova.network.neutron [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [{"id": "924f7463-7e8c-4f58-af04-46082cd691ed", "address": "fa:16:3e:33:06:f7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924f7463-7e", "ovs_interfaceid": "924f7463-7e8c-4f58-af04-46082cd691ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8964bfa9-6690-403d-9936-940d8087617c", "address": "fa:16:3e:f0:c3:e1", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8964bfa9-66", "ovs_interfaceid": "8964bfa9-6690-403d-9936-940d8087617c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1164.858563] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 3049c522-d3bc-4ccf-93bd-0d1efe41d1ca] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1164.899770] env[65758]: DEBUG nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1164.982242] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507731} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.982610] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] bc10286b-195f-48a2-b16c-f8f925ec7a2a/bc10286b-195f-48a2-b16c-f8f925ec7a2a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1164.982838] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1164.983125] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0eadd14-7ed3-4f49-ba87-6ca50a79af93 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.989998] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1164.989998] env[65758]: value = "task-4661309" [ 1164.989998] env[65758]: _type = "Task" [ 1164.989998] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.000788] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661309, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.109418] env[65758]: DEBUG nova.scheduler.client.report [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.285758] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.286531] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.286691] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.287658] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550ed878-ffed-471b-abda-1f1d2e2f85c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.306146] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1165.306424] env[65758]: DEBUG nova.virt.hardware 
[None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1165.306581] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1165.306767] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1165.306910] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1165.307064] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1165.307272] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1165.307428] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1165.307589] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1165.307744] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1165.307914] env[65758]: DEBUG nova.virt.hardware [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1165.314266] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 
tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Reconfiguring VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1165.314678] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1379863f-ee5d-4172-a273-7f26c43bb3c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.332490] env[65758]: DEBUG oslo_vmware.api [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1165.332490] env[65758]: value = "task-4661310" [ 1165.332490] env[65758]: _type = "Task" [ 1165.332490] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.342739] env[65758]: DEBUG oslo_vmware.api [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661310, 'name': ReconfigVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.362442] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 2d787237-26e5-4519-9f6e-1d30b9d016cf] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1165.501077] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661309, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104191} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.501400] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1165.502429] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0599063f-ab4e-4662-97df-50cee60acc57 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.526424] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] bc10286b-195f-48a2-b16c-f8f925ec7a2a/bc10286b-195f-48a2-b16c-f8f925ec7a2a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1165.526758] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-887615a5-aaa8-4267-aada-bb38e3ce4096 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.548285] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1165.548285] env[65758]: value = "task-4661311" [ 1165.548285] env[65758]: _type = "Task" [ 1165.548285] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.561489] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661311, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.843920] env[65758]: DEBUG oslo_vmware.api [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.865764] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 83fa942b-a195-4bcb-9ed5-5bb6764220a4] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1165.909664] env[65758]: DEBUG nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1165.938543] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1165.938867] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1165.939052] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1165.939245] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1165.939390] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1165.939531] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1165.939740] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1165.939896] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1165.940069] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 
tempest-ServersTestJSON-887760377-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1165.940251] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1165.940459] env[65758]: DEBUG nova.virt.hardware [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1165.941911] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ec9ed8-4e43-4d1f-a04d-b3516d958259 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.950965] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1dfaf4e-3399-4b12-a788-c36e166e1edd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.062058] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661311, 'name': ReconfigVM_Task, 'duration_secs': 0.298847} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.062382] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Reconfigured VM instance instance-0000006b to attach disk [datastore2] bc10286b-195f-48a2-b16c-f8f925ec7a2a/bc10286b-195f-48a2-b16c-f8f925ec7a2a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1166.063092] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec65f28f-e671-4e46-a5a7-341fe17994ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.072713] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1166.072713] env[65758]: value = "task-4661312" [ 1166.072713] env[65758]: _type = "Task" [ 1166.072713] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.085274] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661312, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.119909] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.238s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.346191] env[65758]: DEBUG oslo_vmware.api [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661310, 'name': ReconfigVM_Task, 'duration_secs': 0.747585} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.346191] env[65758]: WARNING neutronclient.v2_0.client [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1166.346191] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.346191] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Reconfigured VM to attach interface {{(pid=65758) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1166.352505] env[65758]: DEBUG nova.network.neutron [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Successfully updated port: 9084cee5-02d7-477c-8464-d70e0bfd1ef8 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1166.369237] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.583479] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661312, 'name': Rename_Task, 'duration_secs': 0.427593} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.583720] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.584018] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb77a8dd-3b29-4633-882b-16d05573f248 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.592272] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1166.592272] env[65758]: value = "task-4661313" [ 1166.592272] env[65758]: _type = "Task" [ 1166.592272] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.601654] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661313, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.689788] env[65758]: INFO nova.scheduler.client.report [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted allocation for migration df917240-8170-4ba6-ad2c-6a6ffd6eb131 [ 1166.855094] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f80a1d12-65fc-4cfa-908c-bb243786f1df tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-8d0419d1-c301-4302-80c1-cd0fce7ccba4-8964bfa9-6690-403d-9936-940d8087617c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.280s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.856630] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "refresh_cache-cdc1cfab-4f75-4caf-a4ee-8197af083353" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.856805] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "refresh_cache-cdc1cfab-4f75-4caf-a4ee-8197af083353" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.857020] env[65758]: DEBUG nova.network.neutron [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1167.104841] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 
tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661313, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.196314] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.945s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.362303] env[65758]: WARNING openstack [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1167.362303] env[65758]: WARNING openstack [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1167.369845] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.370038] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.209622] env[65758]: DEBUG nova.network.neutron [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1168.221021] env[65758]: DEBUG nova.compute.manager [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received event network-changed-924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1168.221021] env[65758]: DEBUG nova.compute.manager [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing instance network info cache due to event network-changed-924f7463-7e8c-4f58-af04-46082cd691ed. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1168.221125] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Acquiring lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.221275] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Acquired lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.221377] env[65758]: DEBUG nova.network.neutron [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing network info cache for port 924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1168.222420] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.223149] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.223705] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.223881] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.224057] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_power_states {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.231354] env[65758]: DEBUG oslo_vmware.api [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661313, 'name': PowerOnVM_Task, 'duration_secs': 0.612611} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.232780] env[65758]: DEBUG nova.compute.manager [req-1621b616-9813-44e9-8f3e-c7548d4fe0ed req-2db178c0-66e9-4c93-bb28-294c0d5d2073 service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Received event network-vif-plugged-9084cee5-02d7-477c-8464-d70e0bfd1ef8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1168.232998] env[65758]: DEBUG oslo_concurrency.lockutils [req-1621b616-9813-44e9-8f3e-c7548d4fe0ed req-2db178c0-66e9-4c93-bb28-294c0d5d2073 service nova] Acquiring lock "cdc1cfab-4f75-4caf-a4ee-8197af083353-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.233206] env[65758]: DEBUG oslo_concurrency.lockutils [req-1621b616-9813-44e9-8f3e-c7548d4fe0ed req-2db178c0-66e9-4c93-bb28-294c0d5d2073 service nova] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.233361] env[65758]: DEBUG oslo_concurrency.lockutils [req-1621b616-9813-44e9-8f3e-c7548d4fe0ed req-2db178c0-66e9-4c93-bb28-294c0d5d2073 service nova] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.233567] env[65758]: DEBUG nova.compute.manager [req-1621b616-9813-44e9-8f3e-c7548d4fe0ed req-2db178c0-66e9-4c93-bb28-294c0d5d2073 service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] No waiting events found dispatching network-vif-plugged-9084cee5-02d7-477c-8464-d70e0bfd1ef8 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1168.233725] env[65758]: WARNING nova.compute.manager [req-1621b616-9813-44e9-8f3e-c7548d4fe0ed req-2db178c0-66e9-4c93-bb28-294c0d5d2073 service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Received unexpected event network-vif-plugged-9084cee5-02d7-477c-8464-d70e0bfd1ef8 for instance with vm_state building and task_state spawning. [ 1168.234071] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1168.234343] env[65758]: INFO nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Took 8.22 seconds to spawn the instance on the hypervisor. 
[ 1168.234530] env[65758]: DEBUG nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1168.235796] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d0f680-8964-464d-9d36-53a9a49e28bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.306991] env[65758]: WARNING neutronclient.v2_0.client [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1168.307705] env[65758]: WARNING openstack [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1168.308067] env[65758]: WARNING openstack [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1168.400312] env[65758]: DEBUG nova.network.neutron [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Updating instance_info_cache with network_info: [{"id": "9084cee5-02d7-477c-8464-d70e0bfd1ef8", "address": "fa:16:3e:07:a6:e5", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9084cee5-02", "ovs_interfaceid": "9084cee5-02d7-477c-8464-d70e0bfd1ef8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1168.724108] env[65758]: WARNING neutronclient.v2_0.client [req-c012e359-6b34-42df-953e-5aaf393e657c 
req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1168.724484] env[65758]: WARNING openstack [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1168.724912] env[65758]: WARNING openstack [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1168.735691] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Getting list of instances from cluster (obj){ [ 1168.735691] env[65758]: value = "domain-c8" [ 1168.735691] env[65758]: _type = "ClusterComputeResource" [ 1168.735691] env[65758]: } {{(pid=65758) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1168.737108] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90224158-5edd-47b1-ada7-1c065d018cbf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.742847] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.743070] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.002s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.760347] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Got total of 7 instances {{(pid=65758) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1168.760501] env[65758]: WARNING nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] While synchronizing instance power states, found 8 instances in the database and 7 instances on the hypervisor. 
[ 1168.760631] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Triggering sync for uuid 37aadd44-79e8-4479-862f-265549c9d802 {{(pid=65758) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1168.760906] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Triggering sync for uuid 47cebd84-f9a1-4997-96aa-c76c5faa8c81 {{(pid=65758) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1168.761134] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Triggering sync for uuid 62ae50af-ff52-4084-8161-1a650eff5247 {{(pid=65758) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1168.761328] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Triggering sync for uuid 8d0419d1-c301-4302-80c1-cd0fce7ccba4 {{(pid=65758) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1168.761517] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Triggering sync for uuid ba16e0fe-6748-4d14-bb28-a65d63a2274d {{(pid=65758) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1168.762216] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Triggering sync for uuid b5bbff6b-42e9-4938-b4b3-05a9d5826d1c {{(pid=65758) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1168.762216] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Triggering sync for uuid bc10286b-195f-48a2-b16c-f8f925ec7a2a {{(pid=65758) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1168.762216] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Triggering sync for uuid cdc1cfab-4f75-4caf-a4ee-8197af083353 {{(pid=65758) _sync_power_states /opt/stack/nova/nova/compute/manager.py:11027}} [ 1168.764744] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "37aadd44-79e8-4479-862f-265549c9d802" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.764962] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "37aadd44-79e8-4479-862f-265549c9d802" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.765257] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.765430] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.765660] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "62ae50af-ff52-4084-8161-1a650eff5247" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.765834] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "62ae50af-ff52-4084-8161-1a650eff5247" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.766075] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.766248] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.766475] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.766681] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.766852] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.767129] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.767353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} 
[ 1168.767528] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.767654] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1168.768063] env[65758]: INFO nova.compute.manager [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Took 15.94 seconds to build instance. [ 1168.772019] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995ece47-496c-44a7-ad3d-6b376c88add6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.774935] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c88d593-a1bb-458d-9fce-c9b0e8e4ca4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.777758] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fa830a-22c3-4ba8-b6f8-c357fa43093b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.780349] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd4f4e4-bd7b-4e2e-b243-69d36993b35e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.783094] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5361772-e331-46f1-8fa1-15c76b370b9e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.785415] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.786399] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e93ab12-3753-4444-bc3f-293cbe600359 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.970s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.787162] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.020s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.787342] 
env[65758]: INFO nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] During sync_power_state the instance has a pending task (spawning). Skip. [ 1168.787711] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.902076] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "refresh_cache-cdc1cfab-4f75-4caf-a4ee-8197af083353" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.902491] env[65758]: DEBUG nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Instance network_info: |[{"id": "9084cee5-02d7-477c-8464-d70e0bfd1ef8", "address": "fa:16:3e:07:a6:e5", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9084cee5-02", "ovs_interfaceid": "9084cee5-02d7-477c-8464-d70e0bfd1ef8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1168.902977] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:a6:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9084cee5-02d7-477c-8464-d70e0bfd1ef8', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1168.910697] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating folder: Project (82bfbb5ee6714c9aa5119cb714d28ce2). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1168.911580] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7133db7b-2bed-4775-9b88-dc59e6b8e93b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.923287] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created folder: Project (82bfbb5ee6714c9aa5119cb714d28ce2) in parent group-v909763. [ 1168.923444] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating folder: Instances. Parent ref: group-v910062. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1168.923641] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c89fb2a-025f-46ae-8ac4-e68f7722eeb5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.930428] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "interface-8d0419d1-c301-4302-80c1-cd0fce7ccba4-8964bfa9-6690-403d-9936-940d8087617c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.930660] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-8d0419d1-c301-4302-80c1-cd0fce7ccba4-8964bfa9-6690-403d-9936-940d8087617c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.933036] env[65758]: WARNING neutronclient.v2_0.client [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
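Note on the lock tracing above: the "Acquiring lock … by …", "acquired … waited Ns", and "released … held Ns" DEBUG lines are emitted by oslo.concurrency's lock helpers around critical sections such as do_detach_interface. A minimal illustrative sketch of the same pattern (the lock names and function below are placeholders, not Nova source):

    from oslo_concurrency import lockutils

    # Decorator form: serializes concurrent callers on the named lock and logs
    # acquire/release at DEBUG, as seen in the entries above.
    @lockutils.synchronized('interface-<instance-uuid>-<port-id>')  # placeholder lock name
    def do_detach_interface():
        pass  # critical section: one detach per instance/port at a time

    # Equivalent context-manager form (produces the "Acquiring/Acquired/Releasing
    # lock" DEBUG lines):
    with lockutils.lock('8d0419d1-c301-4302-80c1-cd0fce7ccba4'):
        pass  # work while holding the per-instance lock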
[ 1168.933633] env[65758]: WARNING openstack [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1168.933986] env[65758]: WARNING openstack [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1168.943962] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created folder: Instances in parent group-v910062. [ 1168.944232] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1168.944426] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1168.944639] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f848334d-5c55-4a50-8252-1e9ef5481508 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.965529] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1168.965529] env[65758]: value = "task-4661317" [ 1168.965529] env[65758]: _type = "Task" [ 1168.965529] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.974055] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661317, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.021357] env[65758]: DEBUG nova.network.neutron [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updated VIF entry in instance network info cache for port 924f7463-7e8c-4f58-af04-46082cd691ed. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1169.021720] env[65758]: DEBUG nova.network.neutron [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [{"id": "924f7463-7e8c-4f58-af04-46082cd691ed", "address": "fa:16:3e:33:06:f7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924f7463-7e", "ovs_interfaceid": "924f7463-7e8c-4f58-af04-46082cd691ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8964bfa9-6690-403d-9936-940d8087617c", "address": "fa:16:3e:f0:c3:e1", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8964bfa9-66", "ovs_interfaceid": "8964bfa9-6690-403d-9936-940d8087617c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1169.083759] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.246166] env[65758]: DEBUG nova.compute.utils [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Using /dev/sd instead of None 
{{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1169.303628] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.536s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.304459] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.538s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.304925] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "37aadd44-79e8-4479-862f-265549c9d802" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.305489] env[65758]: INFO nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] During sync_power_state the instance has a pending task (deleting). Skip. [ 1169.305718] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.306295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "62ae50af-ff52-4084-8161-1a650eff5247" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.306593] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.521s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.306814] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.307031] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.307214] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.309478] env[65758]: INFO nova.compute.manager [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Terminating instance [ 1169.311245] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.228s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.311483] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.311687] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.311883] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.313864] env[65758]: INFO nova.compute.manager [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Terminating instance [ 1169.444314] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.444635] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 
tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.445580] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75e388d-7e31-4ff7-b0f5-27fbfecba54d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.470071] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba63cbf9-6029-409a-9333-f15d81b65e82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.478301] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661317, 'name': CreateVM_Task, 'duration_secs': 0.415536} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.493949] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1169.494444] env[65758]: WARNING neutronclient.v2_0.client [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1169.499718] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Reconfiguring VM to detach interface {{(pid=65758) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1169.500191] env[65758]: WARNING neutronclient.v2_0.client [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1169.500518] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.500668] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.500977] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1169.501226] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ae6e6ff-0192-49ba-ae7a-733c39ccee72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.513998] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24e557a5-8769-49af-8cc8-37313e0c4579 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.520589] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1169.520589] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bb277f-548a-0a90-b150-e01f8be190af" [ 1169.520589] env[65758]: _type = "Task" [ 1169.520589] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.524987] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1169.524987] env[65758]: value = "task-4661318" [ 1169.524987] env[65758]: _type = "Task" [ 1169.524987] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.525932] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Releasing lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.526196] env[65758]: DEBUG nova.compute.manager [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received event network-vif-plugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1169.526374] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.526572] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.526802] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.526906] env[65758]: DEBUG nova.compute.manager [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] No waiting events found dispatching network-vif-plugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1169.527055] env[65758]: WARNING nova.compute.manager [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received unexpected event network-vif-plugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b for instance with vm_state building and task_state spawning. [ 1169.527217] env[65758]: DEBUG nova.compute.manager [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received event network-changed-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1169.527365] env[65758]: DEBUG nova.compute.manager [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Refreshing instance network info cache due to event network-changed-f3dcfa87-c097-4b94-bab6-e9fd7455605b. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1169.527551] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Acquiring lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.527671] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Acquired lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.527823] env[65758]: DEBUG nova.network.neutron [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Refreshing network info cache for port f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1169.535515] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bb277f-548a-0a90-b150-e01f8be190af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.541392] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.749697] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.750306] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.984s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.751242] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94267b02-c376-48dd-9953-7886c5857285 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.818322] env[65758]: DEBUG nova.compute.manager [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1169.818644] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1169.819350] env[65758]: DEBUG nova.compute.manager [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1169.819584] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1169.820556] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb33f8b-46b0-4c85-905f-c549299f2dc6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.824897] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433cad58-b990-4958-8189-cd0ca4d3da1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.833191] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.835195] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-376bf1bb-c969-4ae7-baca-8cab47cff0af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.836922] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.837205] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06ca1625-d990-4352-b9b0-4f52a328eaaa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.844477] env[65758]: DEBUG oslo_vmware.api [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1169.844477] env[65758]: value = "task-4661320" [ 1169.844477] env[65758]: _type = "Task" [ 1169.844477] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.845820] env[65758]: DEBUG oslo_vmware.api [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1169.845820] env[65758]: value = "task-4661319" [ 1169.845820] env[65758]: _type = "Task" [ 1169.845820] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.863713] env[65758]: DEBUG oslo_vmware.api [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661320, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.864132] env[65758]: DEBUG oslo_vmware.api [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661319, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.035563] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bb277f-548a-0a90-b150-e01f8be190af, 'name': SearchDatastore_Task, 'duration_secs': 0.010514} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.036664] env[65758]: WARNING neutronclient.v2_0.client [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
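For context on the task polling above (CreateVM_Task, PowerOffVM_Task, SearchDatastore_Task and the "progress is N% … completed successfully" lines): callers hand a vCenter task reference to the oslo.vmware session and block until it finishes. A minimal illustrative sketch, with placeholder credentials and a placeholder vm_ref rather than anything taken from this log:

    from oslo_vmware import api

    # Placeholder endpoint/credentials; task_poll_interval controls how often
    # the "_poll_task ... progress is N%" DEBUG lines are produced.
    session = api.VMwareAPISession('vcenter.example.org', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # managed object reference for the VM, looked up elsewhere
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)  # polls until success, raises on task error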
[ 1170.037402] env[65758]: WARNING openstack [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1170.037764] env[65758]: WARNING openstack [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1170.045921] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.046244] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1170.046551] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.046802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.047129] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.051043] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-282842d2-af8d-4b7c-81fd-57fb49be6e43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.052949] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.070351] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.070597] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1170.071721] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2b11fd6-1f71-41d0-8505-a0d68517902f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.081473] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1170.081473] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521a777a-0ec0-2784-f789-1a9f72d03e01" [ 1170.081473] env[65758]: _type = "Task" [ 1170.081473] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.093169] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521a777a-0ec0-2784-f789-1a9f72d03e01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.214360] env[65758]: WARNING neutronclient.v2_0.client [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1170.215405] env[65758]: WARNING openstack [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1170.215623] env[65758]: WARNING openstack [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1170.241486] env[65758]: DEBUG nova.compute.manager [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received event network-vif-plugged-8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1170.241717] env[65758]: DEBUG oslo_concurrency.lockutils [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Acquiring lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.241970] env[65758]: DEBUG oslo_concurrency.lockutils [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.242138] env[65758]: DEBUG oslo_concurrency.lockutils [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.242278] env[65758]: DEBUG nova.compute.manager [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] No waiting events found dispatching network-vif-plugged-8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1170.242518] env[65758]: WARNING nova.compute.manager [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received unexpected event network-vif-plugged-8964bfa9-6690-403d-9936-940d8087617c for instance with vm_state active and task_state None. 
[ 1170.242708] env[65758]: DEBUG nova.compute.manager [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received event network-changed-8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1170.242991] env[65758]: DEBUG nova.compute.manager [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing instance network info cache due to event network-changed-8964bfa9-6690-403d-9936-940d8087617c. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1170.243212] env[65758]: DEBUG oslo_concurrency.lockutils [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Acquiring lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.243349] env[65758]: DEBUG oslo_concurrency.lockutils [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Acquired lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.243502] env[65758]: DEBUG nova.network.neutron [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Refreshing network info cache for port 8964bfa9-6690-403d-9936-940d8087617c {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1170.262058] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.511s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.271548] env[65758]: DEBUG nova.compute.manager [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Received event network-changed-9084cee5-02d7-477c-8464-d70e0bfd1ef8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1170.271785] env[65758]: DEBUG nova.compute.manager [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Refreshing instance network info cache due to event network-changed-9084cee5-02d7-477c-8464-d70e0bfd1ef8. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1170.272019] env[65758]: DEBUG oslo_concurrency.lockutils [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Acquiring lock "refresh_cache-cdc1cfab-4f75-4caf-a4ee-8197af083353" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.272434] env[65758]: DEBUG oslo_concurrency.lockutils [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Acquired lock "refresh_cache-cdc1cfab-4f75-4caf-a4ee-8197af083353" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.272434] env[65758]: DEBUG nova.network.neutron [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Refreshing network info cache for port 9084cee5-02d7-477c-8464-d70e0bfd1ef8 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1170.305344] env[65758]: DEBUG nova.network.neutron [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updated VIF entry in instance network info cache for port f3dcfa87-c097-4b94-bab6-e9fd7455605b. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1170.305735] env[65758]: DEBUG nova.network.neutron [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [{"id": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "address": "fa:16:3e:9d:65:b5", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dcfa87-c0", "ovs_interfaceid": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1170.360911] env[65758]: DEBUG oslo_vmware.api [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661320, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.364468] env[65758]: DEBUG oslo_vmware.api [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661319, 'name': PowerOffVM_Task, 'duration_secs': 0.220783} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.364757] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1170.364925] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1170.365218] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55499698-8fff-4f1b-9a84-ad00336b234c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.450776] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1170.450996] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1170.451190] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleting the datastore file [datastore2] b5bbff6b-42e9-4938-b4b3-05a9d5826d1c {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1170.451692] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e762c89b-2c28-404e-9c45-41c7b2e39437 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.459561] env[65758]: DEBUG oslo_vmware.api [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for the task: (returnval){ [ 1170.459561] env[65758]: value = "task-4661322" [ 1170.459561] env[65758]: _type = "Task" [ 1170.459561] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.468435] env[65758]: DEBUG oslo_vmware.api [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.536298] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.594500] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521a777a-0ec0-2784-f789-1a9f72d03e01, 'name': SearchDatastore_Task, 'duration_secs': 0.019011} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.595443] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e450dae1-5583-44ad-a3bf-003115f386ba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.600750] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1170.600750] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52959a86-a926-f3f2-087c-71ae1426f02a" [ 1170.600750] env[65758]: _type = "Task" [ 1170.600750] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.608799] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52959a86-a926-f3f2-087c-71ae1426f02a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.746076] env[65758]: WARNING neutronclient.v2_0.client [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
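Regarding the recurring "Disabling service 'block-storage'/'key-manager' … no such option valid_interfaces in group [cinder]/[barbican]" warnings in this section: they indicate the keystoneauth adapter options were never registered for those config groups in this process, so openstacksdk disables the service rather than use incomplete config. A hedged sketch of how such options are normally registered (group names reused from the warnings; everything else is illustrative, not this deployment's code):

    from keystoneauth1 import loading as ks_loading
    from oslo_config import cfg

    CONF = cfg.CONF
    for group in ('cinder', 'barbican'):
        # Registers the standard adapter options, including valid_interfaces,
        # under [cinder] and [barbican], so lookups no longer raise NoSuchOptError.
        ks_loading.register_adapter_conf_options(CONF, group)

    # After registration the option resolves (None unless set in the config file):
    print(CONF.cinder.valid_interfaces)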
[ 1170.746793] env[65758]: WARNING openstack [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1170.747183] env[65758]: WARNING openstack [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1170.775068] env[65758]: WARNING neutronclient.v2_0.client [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1170.775715] env[65758]: WARNING openstack [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1170.776107] env[65758]: WARNING openstack [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1170.808764] env[65758]: DEBUG oslo_concurrency.lockutils [req-c012e359-6b34-42df-953e-5aaf393e657c req-8164d3d4-a721-4a89-b9d4-09f876a34cfb service nova] Releasing lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.816373] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.816770] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.817054] env[65758]: INFO nova.compute.manager [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Attaching volume 760cbc35-0376-4e51-a795-3bea9254770b to /dev/sdb [ 1170.857245] env[65758]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6f0e74-5879-43c8-bc0c-d527c79f7dd7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.865454] env[65758]: DEBUG oslo_vmware.api [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661320, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.876720] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3894393-f8a0-48a4-9220-d2ed19322685 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.896842] env[65758]: DEBUG nova.virt.block_device [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating existing volume attachment record: bf75fbd5-d8b0-49be-9fbb-c6d37b3dfb30 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1170.951054] env[65758]: WARNING neutronclient.v2_0.client [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1170.951501] env[65758]: WARNING openstack [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1170.951897] env[65758]: WARNING openstack [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1170.961098] env[65758]: WARNING neutronclient.v2_0.client [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1170.961710] env[65758]: WARNING openstack [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1170.962070] env[65758]: WARNING openstack [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1170.979515] env[65758]: DEBUG oslo_vmware.api [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Task: {'id': task-4661322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.459072} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.979515] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.979739] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.979772] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1170.979924] env[65758]: INFO nova.compute.manager [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1170.980364] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1170.980666] env[65758]: DEBUG nova.compute.manager [-] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1170.980801] env[65758]: DEBUG nova.network.neutron [-] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1170.981116] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1170.981660] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1170.981924] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1171.042656] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.053352] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1171.078384] env[65758]: DEBUG nova.network.neutron [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Updated VIF entry in instance network info cache for port 9084cee5-02d7-477c-8464-d70e0bfd1ef8. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1171.079426] env[65758]: DEBUG nova.network.neutron [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Updating instance_info_cache with network_info: [{"id": "9084cee5-02d7-477c-8464-d70e0bfd1ef8", "address": "fa:16:3e:07:a6:e5", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9084cee5-02", "ovs_interfaceid": "9084cee5-02d7-477c-8464-d70e0bfd1ef8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1171.107571] env[65758]: DEBUG nova.network.neutron [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updated VIF entry in instance network info cache for port 8964bfa9-6690-403d-9936-940d8087617c. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1171.107571] env[65758]: DEBUG nova.network.neutron [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [{"id": "924f7463-7e8c-4f58-af04-46082cd691ed", "address": "fa:16:3e:33:06:f7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924f7463-7e", "ovs_interfaceid": "924f7463-7e8c-4f58-af04-46082cd691ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8964bfa9-6690-403d-9936-940d8087617c", "address": "fa:16:3e:f0:c3:e1", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8964bfa9-66", "ovs_interfaceid": "8964bfa9-6690-403d-9936-940d8087617c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1171.115920] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52959a86-a926-f3f2-087c-71ae1426f02a, 'name': SearchDatastore_Task, 'duration_secs': 0.009282} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.116300] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.116463] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] cdc1cfab-4f75-4caf-a4ee-8197af083353/cdc1cfab-4f75-4caf-a4ee-8197af083353.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1171.116732] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dcecd93c-0ce8-4fd1-a1ef-a72d6fe7d9f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.125305] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1171.125305] env[65758]: value = "task-4661324" [ 1171.125305] env[65758]: _type = "Task" [ 1171.125305] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.135714] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.361237] env[65758]: DEBUG oslo_vmware.api [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661320, 'name': PowerOffVM_Task, 'duration_secs': 1.202244} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.361623] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.361758] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1171.361958] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12e8f42d-18da-41c0-afa8-c8979d224bc0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.447749] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1171.448031] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1171.448226] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleting the datastore file [datastore1] 47cebd84-f9a1-4997-96aa-c76c5faa8c81 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.448605] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1b2957d-f2ce-4077-be52-f5fd50256827 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.457755] env[65758]: DEBUG oslo_vmware.api [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1171.457755] env[65758]: value = "task-4661328" [ 1171.457755] env[65758]: _type = "Task" [ 1171.457755] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.468243] env[65758]: DEBUG oslo_vmware.api [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661328, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.542457] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.583602] env[65758]: DEBUG oslo_concurrency.lockutils [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Releasing lock "refresh_cache-cdc1cfab-4f75-4caf-a4ee-8197af083353" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.584122] env[65758]: DEBUG nova.compute.manager [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received event network-changed-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1171.584402] env[65758]: DEBUG nova.compute.manager [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Refreshing instance network info cache due to event network-changed-f3dcfa87-c097-4b94-bab6-e9fd7455605b. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1171.584745] env[65758]: DEBUG oslo_concurrency.lockutils [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Acquiring lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.584966] env[65758]: DEBUG oslo_concurrency.lockutils [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Acquired lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.585222] env[65758]: DEBUG nova.network.neutron [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Refreshing network info cache for port f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1171.611145] env[65758]: DEBUG oslo_concurrency.lockutils [req-41e00241-a8c3-4794-b990-0c845f3afc05 req-1b0085df-b736-4eae-8bcc-8148acbd1f2b service nova] Releasing lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.637478] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661324, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.642190] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "37aadd44-79e8-4479-862f-265549c9d802" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.642483] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "37aadd44-79e8-4479-862f-265549c9d802" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.642712] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "37aadd44-79e8-4479-862f-265549c9d802-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.642988] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "37aadd44-79e8-4479-862f-265549c9d802-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.643206] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "37aadd44-79e8-4479-862f-265549c9d802-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.645597] env[65758]: INFO nova.compute.manager [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Terminating instance [ 1171.857073] env[65758]: DEBUG nova.network.neutron [-] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1171.968047] env[65758]: DEBUG oslo_vmware.api [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.499454} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.968047] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1171.968241] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1171.968388] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1171.968544] env[65758]: INFO nova.compute.manager [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1171.968787] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1171.968977] env[65758]: DEBUG nova.compute.manager [-] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1171.969099] env[65758]: DEBUG nova.network.neutron [-] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1171.969321] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1171.969907] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1171.970202] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1172.008883] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1172.041797] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.088957] env[65758]: WARNING neutronclient.v2_0.client [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1172.089630] env[65758]: WARNING openstack [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1172.089988] env[65758]: WARNING openstack [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1172.135707] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661324, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586561} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.136016] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] cdc1cfab-4f75-4caf-a4ee-8197af083353/cdc1cfab-4f75-4caf-a4ee-8197af083353.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1172.136236] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1172.136502] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a185ab75-6399-4177-aea1-8836e100048c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.143072] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1172.143072] env[65758]: value = "task-4661329" [ 1172.143072] env[65758]: _type = "Task" [ 1172.143072] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.149527] env[65758]: DEBUG nova.compute.manager [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1172.149844] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1172.154392] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4b229c-fa20-463c-925d-e5d8701f94ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.156987] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661329, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.165536] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1172.165824] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-794e073d-3136-4ab2-b413-52fb232b0267 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.172490] env[65758]: DEBUG oslo_vmware.api [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1172.172490] env[65758]: value = "task-4661330" [ 1172.172490] env[65758]: _type = "Task" [ 1172.172490] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.181480] env[65758]: DEBUG oslo_vmware.api [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661330, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.260318] env[65758]: WARNING neutronclient.v2_0.client [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1172.261148] env[65758]: WARNING openstack [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1172.261311] env[65758]: WARNING openstack [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1172.279747] env[65758]: DEBUG nova.compute.manager [req-6641cc60-c6b0-4e4c-9dc5-41caed52b3c1 req-0143d8aa-1f0a-4763-9856-ab84def96e50 service nova] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Received event network-vif-deleted-e06e56fe-7299-46f2-9238-9f1351c4ce06 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1172.360249] env[65758]: INFO nova.compute.manager [-] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Took 1.38 seconds to deallocate network for instance. [ 1172.370375] env[65758]: DEBUG nova.network.neutron [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updated VIF entry in instance network info cache for port f3dcfa87-c097-4b94-bab6-e9fd7455605b. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1172.370375] env[65758]: DEBUG nova.network.neutron [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [{"id": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "address": "fa:16:3e:9d:65:b5", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dcfa87-c0", "ovs_interfaceid": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1172.542496] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': 
task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.652952] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081972} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.653273] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1172.654072] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0ec39a-aaf5-40a1-a71d-5d82ffaf9129 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.676386] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] cdc1cfab-4f75-4caf-a4ee-8197af083353/cdc1cfab-4f75-4caf-a4ee-8197af083353.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1172.676727] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63612cd8-8795-49cb-8c88-4cf0b14c0752 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.701013] env[65758]: DEBUG oslo_vmware.api [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661330, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.702408] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1172.702408] env[65758]: value = "task-4661331" [ 1172.702408] env[65758]: _type = "Task" [ 1172.702408] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.712300] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661331, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.873189] env[65758]: DEBUG oslo_concurrency.lockutils [req-bb5dc0b1-d904-463e-a9aa-5aea415f335a req-390e3242-de0c-41e1-bc36-37cf666d96f8 service nova] Releasing lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.874932] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.875262] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.875493] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.916345] env[65758]: INFO nova.scheduler.client.report [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Deleted allocations for instance b5bbff6b-42e9-4938-b4b3-05a9d5826d1c [ 1172.951383] env[65758]: DEBUG nova.network.neutron [-] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1173.043089] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.186410] env[65758]: DEBUG oslo_vmware.api [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661330, 'name': PowerOffVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.212383] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661331, 'name': ReconfigVM_Task, 'duration_secs': 0.289721} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.212735] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Reconfigured VM instance instance-0000006c to attach disk [datastore2] cdc1cfab-4f75-4caf-a4ee-8197af083353/cdc1cfab-4f75-4caf-a4ee-8197af083353.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1173.213442] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0f09697-73e5-4a61-b0a0-d1f3856d313a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.221225] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1173.221225] env[65758]: value = "task-4661332" [ 1173.221225] env[65758]: _type = "Task" [ 1173.221225] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.231961] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661332, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.428531] env[65758]: DEBUG oslo_concurrency.lockutils [None req-92fcda07-e107-48c5-9447-a197494e2559 tempest-DeleteServersTestJSON-1083168368 tempest-DeleteServersTestJSON-1083168368-project-member] Lock "b5bbff6b-42e9-4938-b4b3-05a9d5826d1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.121s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.454884] env[65758]: INFO nova.compute.manager [-] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Took 1.49 seconds to deallocate network for instance. [ 1173.542459] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.687277] env[65758]: DEBUG oslo_vmware.api [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661330, 'name': PowerOffVM_Task, 'duration_secs': 1.245725} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.687639] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1173.687885] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1173.688198] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d26fa24f-d4e1-4601-84f4-cf33f52c6b5d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.732076] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661332, 'name': Rename_Task, 'duration_secs': 0.368001} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.732423] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1173.732793] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7624e82-555a-4131-9935-c91a9176abd7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.741032] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1173.741032] env[65758]: value = "task-4661335" [ 1173.741032] env[65758]: _type = "Task" [ 1173.741032] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.751184] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661335, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.767486] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1173.767760] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1173.767993] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleting the datastore file [datastore2] 37aadd44-79e8-4479-862f-265549c9d802 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1173.768273] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e8ec140-cbfb-4696-b7a4-7862e0e9a313 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.776387] env[65758]: DEBUG oslo_vmware.api [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for the task: (returnval){ [ 1173.776387] env[65758]: value = "task-4661336" [ 1173.776387] env[65758]: _type = "Task" [ 1173.776387] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.788232] env[65758]: DEBUG oslo_vmware.api [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661336, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.961223] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.961502] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.961688] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.981337] env[65758]: INFO nova.scheduler.client.report [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted allocations for instance 47cebd84-f9a1-4997-96aa-c76c5faa8c81 [ 1174.047460] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.252767] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661335, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.288302] env[65758]: DEBUG oslo_vmware.api [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Task: {'id': task-4661336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228255} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.288580] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1174.288757] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1174.288944] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1174.289124] env[65758]: INFO nova.compute.manager [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Took 2.14 seconds to destroy the instance on the hypervisor. [ 1174.289367] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1174.289578] env[65758]: DEBUG nova.compute.manager [-] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1174.289673] env[65758]: DEBUG nova.network.neutron [-] [instance: 37aadd44-79e8-4479-862f-265549c9d802] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1174.289928] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1174.290657] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1174.290722] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1174.319039] env[65758]: DEBUG nova.compute.manager [req-7a3c3d3b-ac0a-449d-a249-846fc1d7a67e req-4f9a6b89-8d03-4464-920c-92ac8300736f service nova] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Received event network-vif-deleted-67e62b92-0851-4648-b7d7-181b274c8325 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1174.376483] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1174.490546] env[65758]: DEBUG oslo_concurrency.lockutils [None req-11b536db-320f-4dda-9cb5-3a7df7404032 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "47cebd84-f9a1-4997-96aa-c76c5faa8c81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.179s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.544739] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.752555] env[65758]: DEBUG oslo_vmware.api [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661335, 'name': PowerOnVM_Task, 'duration_secs': 0.520833} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.752893] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1174.753108] env[65758]: INFO nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Took 8.84 seconds to spawn the instance on the hypervisor. 
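The ReconfigVM_Task and PowerOnVM_Task progress lines above are produced while oslo.vmware polls a vCenter task until it finishes. A rough sketch of that invoke-then-wait pattern with oslo.vmware, assuming a reachable vCenter; the host name and credentials below are placeholders (Nova reads the real values from the [vmware] section of nova.conf), and constructing the session logs in immediately:

    from oslo_vmware import api as vmware_api

    # Placeholder connection details for illustration only.
    session = vmware_api.VMwareAPISession(
        host='vcenter.example.org',
        server_username='administrator@vsphere.local',
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5)

    def power_on(session, vm_ref):
        # invoke_api returns a task reference; wait_for_task polls it,
        # logging "progress is N%" lines like the ones above until the
        # task succeeds or raises on failure.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)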
[ 1174.753284] env[65758]: DEBUG nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1174.754063] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14b537f-89d5-43dd-ae7d-fba93b4b6fb3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.045170] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task} progress is 18%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.081238] env[65758]: DEBUG nova.network.neutron [-] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1175.225190] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "1f773924-74ee-4151-81ba-d105ce225289" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.225470] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "1f773924-74ee-4151-81ba-d105ce225289" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.270703] env[65758]: INFO nova.compute.manager [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Took 13.67 seconds to build instance. [ 1175.452726] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1175.453072] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910066', 'volume_id': '760cbc35-0376-4e51-a795-3bea9254770b', 'name': 'volume-760cbc35-0376-4e51-a795-3bea9254770b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ba16e0fe-6748-4d14-bb28-a65d63a2274d', 'attached_at': '', 'detached_at': '', 'volume_id': '760cbc35-0376-4e51-a795-3bea9254770b', 'serial': '760cbc35-0376-4e51-a795-3bea9254770b'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1175.454010] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261064cf-7b1c-402b-a60f-c6392dcc7560 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.471313] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a1be43-40b5-4533-8980-360e882dcc53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.498849] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-760cbc35-0376-4e51-a795-3bea9254770b/volume-760cbc35-0376-4e51-a795-3bea9254770b.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1175.499199] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1843bc4b-f21d-4a2a-ab03-7befe27a4abc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.518177] env[65758]: DEBUG oslo_vmware.api [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1175.518177] env[65758]: value = "task-4661338" [ 1175.518177] env[65758]: _type = "Task" [ 1175.518177] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.527431] env[65758]: DEBUG oslo_vmware.api [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.544826] env[65758]: DEBUG oslo_vmware.api [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661318, 'name': ReconfigVM_Task, 'duration_secs': 5.787099} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.545118] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.545325] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Reconfigured VM to detach interface {{(pid=65758) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1175.545753] env[65758]: WARNING neutronclient.v2_0.client [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1175.546187] env[65758]: WARNING neutronclient.v2_0.client [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1175.546771] env[65758]: WARNING openstack [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1175.547254] env[65758]: WARNING openstack [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1175.584043] env[65758]: INFO nova.compute.manager [-] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Took 1.29 seconds to deallocate network for instance. [ 1175.593088] env[65758]: WARNING neutronclient.v2_0.client [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1175.728270] env[65758]: DEBUG nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1175.775244] env[65758]: DEBUG oslo_concurrency.lockutils [None req-afd1e2f9-4dea-4922-ae40-b8d23f34f9f6 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.179s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.776036] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.008s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.776036] env[65758]: INFO nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] During sync_power_state the instance has a pending task (spawning). Skip. [ 1175.776036] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.029686] env[65758]: DEBUG oslo_vmware.api [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661338, 'name': ReconfigVM_Task, 'duration_secs': 0.425737} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.029994] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-760cbc35-0376-4e51-a795-3bea9254770b/volume-760cbc35-0376-4e51-a795-3bea9254770b.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1176.037529] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c443d013-2ddb-444b-98ec-015e534434bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.053688] env[65758]: DEBUG oslo_vmware.api [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1176.053688] env[65758]: value = "task-4661339" [ 1176.053688] env[65758]: _type = "Task" [ 1176.053688] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.062846] env[65758]: DEBUG oslo_vmware.api [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661339, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.090232] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.090541] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.090779] env[65758]: DEBUG nova.objects.instance [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lazy-loading 'resources' on Instance uuid 37aadd44-79e8-4479-862f-265549c9d802 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1176.255208] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.349357] env[65758]: DEBUG nova.compute.manager [req-1059135d-6e40-465f-a186-84e22055dd7f req-33c2cc2b-74f3-41ea-83b0-f70b902d0b77 service nova] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Received event network-vif-deleted-b574c870-790b-4dad-8dce-58d93bb6fe44 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1176.564126] env[65758]: DEBUG oslo_vmware.api [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661339, 'name': ReconfigVM_Task, 'duration_secs': 0.157698} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.564510] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910066', 'volume_id': '760cbc35-0376-4e51-a795-3bea9254770b', 'name': 'volume-760cbc35-0376-4e51-a795-3bea9254770b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ba16e0fe-6748-4d14-bb28-a65d63a2274d', 'attached_at': '', 'detached_at': '', 'volume_id': '760cbc35-0376-4e51-a795-3bea9254770b', 'serial': '760cbc35-0376-4e51-a795-3bea9254770b'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1176.693558] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42dece0-6191-41cc-8d75-823dbdc94dd6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.701760] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723f62cc-55ca-49ee-98c8-d0650660e915 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.740141] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c5ee7a-0ddd-484f-9492-d0b760fe0d40 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.749943] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a1d8f6-0122-4af3-b5d7-5e339b11ce12 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.766405] env[65758]: DEBUG nova.compute.provider_tree [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.897009] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.897158] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquired lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.897259] env[65758]: DEBUG nova.network.neutron [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Building network info cache for instance {{(pid=65758) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2104}} [ 1177.270026] env[65758]: DEBUG nova.scheduler.client.report [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.400990] env[65758]: WARNING neutronclient.v2_0.client [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1177.401741] env[65758]: WARNING openstack [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1177.402120] env[65758]: WARNING openstack [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1177.414371] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "2014e795-5c62-47c2-9574-2f32ba29638d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.414607] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "2014e795-5c62-47c2-9574-2f32ba29638d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.419399] env[65758]: DEBUG oslo_concurrency.lockutils [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.419787] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.420137] env[65758]: DEBUG oslo_concurrency.lockutils [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.420851] env[65758]: DEBUG oslo_concurrency.lockutils [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.420851] env[65758]: DEBUG oslo_concurrency.lockutils [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.423424] env[65758]: INFO nova.compute.manager [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Terminating instance [ 1177.593035] env[65758]: WARNING neutronclient.v2_0.client [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
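The "Inventory has not changed ... based on inventory data" entries carry the placement inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. The dict below reproduces that structure from the log, with a small helper showing the conventional capacity calculation from total, reserved and allocation_ratio; the helper itself is illustrative, not Nova or Placement code:

    # Inventory exactly as reported in the log for this provider.
    inventory = {
        'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1,
                      'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200, 'reserved': 0, 'min_unit': 1,
                      'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    def capacity(resource_class):
        """Effective schedulable capacity for one resource class."""
        inv = inventory[resource_class]
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    # e.g. capacity('VCPU') == 192.0 and capacity('MEMORY_MB') == 196078.0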
[ 1177.593724] env[65758]: WARNING openstack [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1177.594108] env[65758]: WARNING openstack [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1177.607560] env[65758]: DEBUG nova.objects.instance [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'flavor' on Instance uuid ba16e0fe-6748-4d14-bb28-a65d63a2274d {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.682944] env[65758]: INFO nova.network.neutron [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Port 8964bfa9-6690-403d-9936-940d8087617c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1177.682944] env[65758]: DEBUG nova.network.neutron [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [{"id": "924f7463-7e8c-4f58-af04-46082cd691ed", "address": "fa:16:3e:33:06:f7", "network": {"id": "2d9ed8a6-3bd5-4110-adc7-d8e8d5d988e4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1266491304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64ffccae76ed401582dd915ae5f87922", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924f7463-7e", "ovs_interfaceid": "924f7463-7e8c-4f58-af04-46082cd691ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1177.774900] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 
tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.777663] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.522s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.779326] env[65758]: INFO nova.compute.claims [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1177.800505] env[65758]: INFO nova.scheduler.client.report [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Deleted allocations for instance 37aadd44-79e8-4479-862f-265549c9d802 [ 1177.917368] env[65758]: DEBUG nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1177.927776] env[65758]: DEBUG nova.compute.manager [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1177.927776] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1177.928771] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7db3fb9-3c3b-4d55-91fc-8afd57516648 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.938587] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1177.939361] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e4da8f8-ce31-4abd-ae7b-d65e0e5b4595 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.947898] env[65758]: DEBUG oslo_vmware.api [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1177.947898] env[65758]: value = "task-4661340" [ 1177.947898] env[65758]: _type = "Task" [ 1177.947898] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.958693] env[65758]: DEBUG oslo_vmware.api [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661340, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.116653] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ed3cecba-5a73-4fd0-a1c5-ee6fe0812ed9 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.300s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.186873] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Releasing lock "refresh_cache-8d0419d1-c301-4302-80c1-cd0fce7ccba4" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.313739] env[65758]: DEBUG oslo_concurrency.lockutils [None req-52811689-1087-4a86-a49a-d1d99de9e9ac tempest-ServerActionsTestOtherA-1835889180 tempest-ServerActionsTestOtherA-1835889180-project-member] Lock "37aadd44-79e8-4479-862f-265549c9d802" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.671s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.437636] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.460191] env[65758]: DEBUG oslo_vmware.api [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661340, 'name': PowerOffVM_Task, 'duration_secs': 0.268034} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.460475] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1178.460640] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1178.460895] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9af04697-bc0c-4090-ab6b-7a56060f07b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.538713] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1178.538976] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1178.539201] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleting the datastore file [datastore1] 8d0419d1-c301-4302-80c1-cd0fce7ccba4 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1178.540468] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fd21362-d62c-4767-b4de-b76f55446289 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.549971] env[65758]: DEBUG oslo_vmware.api [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1178.549971] env[65758]: value = "task-4661342" [ 1178.549971] env[65758]: _type = "Task" [ 1178.549971] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.559598] env[65758]: DEBUG oslo_vmware.api [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661342, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.689380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-8d22530b-1b94-4653-be7e-71423833eb87 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "interface-8d0419d1-c301-4302-80c1-cd0fce7ccba4-8964bfa9-6690-403d-9936-940d8087617c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.758s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.950663] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41af307-22bd-436d-aa19-8e6200ab62c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.960523] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf9e5b1-2391-438e-8fd2-2799f765c9b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.998697] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc954a9-d280-4e4f-81f3-dc58b47950c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.007895] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780c7e7e-17db-4fba-9973-d91e1a98dff1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.026172] env[65758]: DEBUG nova.compute.provider_tree [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.028425] env[65758]: DEBUG nova.compute.manager [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Stashing vm_state: active {{(pid=65758) _prep_resize /opt/stack/nova/nova/compute/manager.py:6176}} [ 1179.062024] env[65758]: DEBUG oslo_vmware.api [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152467} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.062401] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1179.062693] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1179.062954] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1179.063241] env[65758]: INFO nova.compute.manager [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1179.063446] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1179.063674] env[65758]: DEBUG nova.compute.manager [-] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1179.063742] env[65758]: DEBUG nova.network.neutron [-] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1179.064082] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1179.064681] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1179.064983] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1179.144598] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
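The "Disabling service 'block-storage' / 'key-manager'" warnings above mean the SDK asked oslo.config for an option that was never registered in that group. A minimal reproduction of the same failure mode with oslo.config alone; the group and option names are taken from the warning text, and this is not the Nova or SDK code path itself:

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))

    try:
        # Reading an option that was never registered in [cinder] raises
        # exactly the error quoted in the warnings above.
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print(exc)  # no such option valid_interfaces in group [cinder]

    # Registering the option (as keystoneauth's adapter option loading
    # normally does) lets the lookup succeed instead of disabling the service.
    conf.register_opt(
        cfg.ListOpt('valid_interfaces', default=['internal', 'public']),
        group='cinder')
    print(conf.cinder.valid_interfaces)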
[ 1179.533708] env[65758]: DEBUG nova.scheduler.client.report [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1179.575373] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.019934] env[65758]: DEBUG nova.compute.manager [req-d8c75c9e-3271-458f-8700-2ea7a4a9c689 req-1258ad7d-eda7-4138-9105-31a34d6bbc02 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Received event network-vif-deleted-924f7463-7e8c-4f58-af04-46082cd691ed {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1180.020206] env[65758]: INFO nova.compute.manager [req-d8c75c9e-3271-458f-8700-2ea7a4a9c689 req-1258ad7d-eda7-4138-9105-31a34d6bbc02 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Neutron deleted interface 924f7463-7e8c-4f58-af04-46082cd691ed; detaching it from the instance and deleting it from the info cache [ 1180.020206] env[65758]: DEBUG nova.network.neutron [req-d8c75c9e-3271-458f-8700-2ea7a4a9c689 req-1258ad7d-eda7-4138-9105-31a34d6bbc02 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1180.051256] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.051784] env[65758]: DEBUG nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1180.055823] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.617s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.057302] env[65758]: INFO nova.compute.claims [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.488279] env[65758]: DEBUG nova.network.neutron [-] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1180.522828] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94e1d941-fd23-4610-8364-cadac55e6a7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.533929] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777d2881-80ee-4d8f-92c9-3d6e53a9f38a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.564810] env[65758]: DEBUG nova.compute.utils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1180.568564] env[65758]: DEBUG nova.compute.manager [req-d8c75c9e-3271-458f-8700-2ea7a4a9c689 req-1258ad7d-eda7-4138-9105-31a34d6bbc02 service nova] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Detach interface failed, port_id=924f7463-7e8c-4f58-af04-46082cd691ed, reason: Instance 8d0419d1-c301-4302-80c1-cd0fce7ccba4 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1180.569028] env[65758]: DEBUG nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1180.569328] env[65758]: DEBUG nova.network.neutron [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1180.570136] env[65758]: WARNING neutronclient.v2_0.client [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
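The repeated neutronclient deprecation warnings point callers at openstacksdk for operations like the port that allocate_for_instance() creates for instance 1f773924-74ee-4151-81ba-d105ce225289. A hedged sketch of the equivalent SDK call, standing in for rather than reproducing Nova's actual code path; the cloud name, network id and device values below are placeholders:

    import openstack

    # 'devstack-admin' is a placeholder clouds.yaml entry.
    conn = openstack.connect(cloud='devstack-admin')

    def create_instance_port(conn, network_id, instance_uuid):
        # Minimal port create via the SDK; the returned Port's id is what
        # appears in the "Successfully created port: ..." log lines.
        return conn.network.create_port(
            network_id=network_id,
            device_id=instance_uuid,
            device_owner='compute:nova')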
[ 1180.570136] env[65758]: WARNING neutronclient.v2_0.client [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1180.570778] env[65758]: WARNING openstack [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1180.571156] env[65758]: WARNING openstack [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1180.628170] env[65758]: DEBUG nova.policy [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b15f650508f844388197b63e6fee78a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4c2ab2b80c04c38bfb4c7cafac87fe6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1180.990925] env[65758]: INFO nova.compute.manager [-] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Took 1.93 seconds to deallocate network for instance. [ 1181.080818] env[65758]: DEBUG nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1181.218770] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762b838f-fa03-41ec-af89-d8b5014c07c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.226917] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cf3ee2-d1a0-492e-a7b5-0bc9effe95f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.261103] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406ad583-8e08-4ab6-8584-b5cdc35106e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.270582] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723aa7e7-41c3-4bd0-b902-01722f6aa2c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.289066] env[65758]: DEBUG nova.compute.provider_tree [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.307403] env[65758]: DEBUG nova.network.neutron [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Successfully created port: 084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1181.497707] env[65758]: DEBUG oslo_concurrency.lockutils [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.792935] env[65758]: DEBUG nova.scheduler.client.report [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1182.098393] env[65758]: DEBUG nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1182.136076] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1182.136076] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1182.136076] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1182.136076] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1182.136076] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1182.136076] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1182.136076] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1182.136076] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1182.137919] env[65758]: DEBUG nova.virt.hardware [None 
req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1182.138115] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1182.138293] env[65758]: DEBUG nova.virt.hardware [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1182.139545] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd19e56-3497-41f3-8a30-b0d1e4f451e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.150709] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c84cc59-35a6-43fc-ae20-7304d91cde62 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.299108] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.299483] env[65758]: DEBUG nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1182.304356] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.729s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.810022] env[65758]: DEBUG nova.compute.utils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1182.817440] env[65758]: INFO nova.compute.claims [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1182.821939] env[65758]: DEBUG nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1182.822722] env[65758]: DEBUG nova.network.neutron [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1182.823539] env[65758]: WARNING neutronclient.v2_0.client [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1182.823995] env[65758]: WARNING neutronclient.v2_0.client [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
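Annotation: the nova.virt.hardware entries above ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") walk through CPU topology selection for the one-vCPU m1.nano flavor: with no limits or preferences set, every (sockets, cores, threads) factorisation of the vCPU count is enumerated and the single 1x1x1 result wins. A toy enumeration under those assumptions follows; this is an illustrative sketch, not Nova's actual implementation, and the function name is ours.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology reported in the log
print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), ...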
[ 1182.825178] env[65758]: WARNING openstack [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1182.825568] env[65758]: WARNING openstack [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1183.323627] env[65758]: DEBUG nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1183.336418] env[65758]: INFO nova.compute.resource_tracker [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating resource usage from migration 75fd58b3-37ba-47e0-bee0-bec414fd08b6 [ 1183.465642] env[65758]: DEBUG nova.policy [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc85d2d1d84f4df0b4de5e6388bb9398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82bfbb5ee6714c9aa5119cb714d28ce2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1183.529156] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81072c7-0394-4d65-b724-4978554cbe89 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.541415] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e3f8fa-a2e9-42f1-98ad-e6f6fc8f517a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.579980] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297e39f9-5347-4076-af74-2e9101c8beaf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.588708] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af158a32-267c-40b2-9a68-ae66e5d5d6ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.605658] env[65758]: DEBUG nova.compute.provider_tree [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 
tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.631037] env[65758]: DEBUG nova.compute.manager [req-4e6ae55d-51cd-45ca-8a3d-317849fe0099 req-9802e4ac-07a2-4d21-941f-75bc48fc4ded service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Received event network-vif-plugged-084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1183.631259] env[65758]: DEBUG oslo_concurrency.lockutils [req-4e6ae55d-51cd-45ca-8a3d-317849fe0099 req-9802e4ac-07a2-4d21-941f-75bc48fc4ded service nova] Acquiring lock "1f773924-74ee-4151-81ba-d105ce225289-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.631467] env[65758]: DEBUG oslo_concurrency.lockutils [req-4e6ae55d-51cd-45ca-8a3d-317849fe0099 req-9802e4ac-07a2-4d21-941f-75bc48fc4ded service nova] Lock "1f773924-74ee-4151-81ba-d105ce225289-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.632528] env[65758]: DEBUG oslo_concurrency.lockutils [req-4e6ae55d-51cd-45ca-8a3d-317849fe0099 req-9802e4ac-07a2-4d21-941f-75bc48fc4ded service nova] Lock "1f773924-74ee-4151-81ba-d105ce225289-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.632528] env[65758]: DEBUG nova.compute.manager [req-4e6ae55d-51cd-45ca-8a3d-317849fe0099 req-9802e4ac-07a2-4d21-941f-75bc48fc4ded service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] No waiting events found dispatching network-vif-plugged-084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1183.632528] env[65758]: WARNING nova.compute.manager [req-4e6ae55d-51cd-45ca-8a3d-317849fe0099 req-9802e4ac-07a2-4d21-941f-75bc48fc4ded service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Received unexpected event network-vif-plugged-084d10cd-9734-4baf-91b3-892d54084a42 for instance with vm_state building and task_state spawning. 
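Annotation: the "Received event network-vif-plugged-… / No waiting events found dispatching … / Received unexpected event" sequence above is the compute manager matching externally delivered Neutron events against events a spawning thread has registered interest in. Below is a rough sketch of that prepare/dispatch shape using plain threading primitives; the class and method names are hypothetical and not Nova's actual code.

import threading

class InstanceEvents:
    """Toy version of the prepare-then-dispatch pattern seen in the log."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Called by the spawning thread before triggering the action that
        # will eventually produce the external event.
        with self._lock:
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

    def dispatch(self, instance_uuid, event_name):
        # Called when Neutron reports e.g. network-vif-plugged-<port>.
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            # Analogous to the "Received unexpected event ..." WARNING above.
            print(f"unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()

events = InstanceEvents()
waiter = events.prepare("1f773924-74ee-4151-81ba-d105ce225289",
                        "network-vif-plugged-084d10cd-9734-4baf-91b3-892d54084a42")
events.dispatch("1f773924-74ee-4151-81ba-d105ce225289",
                "network-vif-plugged-084d10cd-9734-4baf-91b3-892d54084a42")
waiter.wait(timeout=1)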
[ 1183.717669] env[65758]: DEBUG nova.network.neutron [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Successfully updated port: 084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1183.837117] env[65758]: DEBUG nova.network.neutron [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Successfully created port: 47faf1b5-f52f-4a76-818e-bd682ef2c632 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1184.110440] env[65758]: DEBUG nova.scheduler.client.report [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1184.225859] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.226206] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.226299] env[65758]: DEBUG nova.network.neutron [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1184.337652] env[65758]: DEBUG nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1184.363217] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1184.363394] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1184.363568] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1184.363817] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1184.363900] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1184.363987] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1184.364211] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1184.364371] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1184.364541] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 
tempest-ServersTestJSON-887760377-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1184.364744] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1184.365017] env[65758]: DEBUG nova.virt.hardware [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1184.365958] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b728751-a4e7-478c-b660-c221faebf112 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.374710] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788fc4b6-0630-43b1-b820-c86107122132 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.619362] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.315s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.619589] env[65758]: INFO nova.compute.manager [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Migrating [ 1184.629267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.132s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.629646] env[65758]: DEBUG nova.objects.instance [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'resources' on Instance uuid 8d0419d1-c301-4302-80c1-cd0fce7ccba4 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1184.732309] env[65758]: WARNING openstack [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1184.732736] env[65758]: WARNING openstack [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 
'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1184.870871] env[65758]: DEBUG nova.network.neutron [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1185.055559] env[65758]: WARNING neutronclient.v2_0.client [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1185.055948] env[65758]: WARNING openstack [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.056355] env[65758]: WARNING openstack [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1185.140410] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.140624] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.140869] env[65758]: DEBUG nova.network.neutron [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1185.223745] env[65758]: DEBUG nova.network.neutron [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updating instance_info_cache with network_info: [{"id": "084d10cd-9734-4baf-91b3-892d54084a42", "address": "fa:16:3e:02:7e:09", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": 
"tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap084d10cd-97", "ovs_interfaceid": "084d10cd-9734-4baf-91b3-892d54084a42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1185.291025] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8498ec-3165-43a3-b378-c8a0def5b3d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.300585] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4616feab-b0fb-4cbf-a7db-75a7b47e23ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.342036] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbeff57-e24b-4528-8edd-d6d9b8ac6708 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.350785] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8885e43-b980-4098-8f92-d66a116848ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.366664] env[65758]: DEBUG nova.compute.provider_tree [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.463677] env[65758]: DEBUG nova.network.neutron [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Successfully updated port: 47faf1b5-f52f-4a76-818e-bd682ef2c632 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1185.588615] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "0c64f9ad-33e1-4792-9b44-b088d77c0383" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.588839] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 
tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "0c64f9ad-33e1-4792-9b44-b088d77c0383" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.644832] env[65758]: WARNING neutronclient.v2_0.client [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1185.645664] env[65758]: WARNING openstack [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.646063] env[65758]: WARNING openstack [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1185.664064] env[65758]: DEBUG nova.compute.manager [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Received event network-changed-084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1185.664270] env[65758]: DEBUG nova.compute.manager [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Refreshing instance network info cache due to event network-changed-084d10cd-9734-4baf-91b3-892d54084a42. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1185.664456] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Acquiring lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.728437] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.728544] env[65758]: DEBUG nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Instance network_info: |[{"id": "084d10cd-9734-4baf-91b3-892d54084a42", "address": "fa:16:3e:02:7e:09", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap084d10cd-97", "ovs_interfaceid": "084d10cd-9734-4baf-91b3-892d54084a42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1185.729039] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Acquired lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.729219] env[65758]: DEBUG nova.network.neutron [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Refreshing network info cache for port 084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1185.730465] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:7e:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5efce30e-48dd-493a-a354-f562a8adf7af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'084d10cd-9734-4baf-91b3-892d54084a42', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1185.738772] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1185.739910] env[65758]: WARNING neutronclient.v2_0.client [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1185.740535] env[65758]: WARNING openstack [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.740882] env[65758]: WARNING openstack [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1185.747717] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1185.748401] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af1dcdfd-98f4-47e0-917d-f19ddd6f268a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.769167] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1185.769167] env[65758]: value = "task-4661343" [ 1185.769167] env[65758]: _type = "Task" [ 1185.769167] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.777724] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661343, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.822905] env[65758]: WARNING neutronclient.v2_0.client [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
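Annotation: the Folder.CreateVM_Task entries above show the usual oslo.vmware pattern: invoke a vSphere method that returns a Task managed object, then poll it until it completes ("Waiting for the task … progress is 0%" and later "completed successfully"), alongside the many PropertyCollector.RetrievePropertiesEx reads. A minimal sketch of that flow is below, assuming the public oslo.vmware session API; the host, credentials and the power-off call are placeholders, not values or operations taken from this deployment.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder connection details (not from this log).
session = vmware_api.VMwareAPISession(
    "vcenter.example.org", "user", "secret",
    api_retry_count=3, task_poll_interval=0.5)

# Non-task call: fetch a page of VirtualMachine objects, the same kind of
# PropertyCollector.RetrievePropertiesEx traffic seen throughout the log.
vms = session.invoke_api(vim_util, "get_objects", session.vim,
                         "VirtualMachine", 100)

# Task call: *_Task methods return a Task object that wait_for_task() polls,
# mirroring the "Waiting for the task" / "progress is 0%" entries above.
task = session.invoke_api(session.vim, "PowerOffVM_Task", vms.objects[0].obj)
session.wait_for_task(task)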
[ 1185.823588] env[65758]: WARNING openstack [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.827022] env[65758]: WARNING openstack [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1185.869857] env[65758]: DEBUG nova.scheduler.client.report [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1185.929794] env[65758]: DEBUG nova.network.neutron [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [{"id": "b91df992-11ae-4d37-af24-380860864b45", "address": "fa:16:3e:13:ee:a2", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb91df992-11", "ovs_interfaceid": "b91df992-11ae-4d37-af24-380860864b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1185.967629] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring 
lock "refresh_cache-2014e795-5c62-47c2-9574-2f32ba29638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.967851] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "refresh_cache-2014e795-5c62-47c2-9574-2f32ba29638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.968054] env[65758]: DEBUG nova.network.neutron [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1185.976750] env[65758]: WARNING neutronclient.v2_0.client [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1185.977329] env[65758]: WARNING openstack [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1185.978031] env[65758]: WARNING openstack [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1186.030100] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "a9550f72-009c-4143-afe2-887727e5c071" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.030540] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.088374] env[65758]: DEBUG nova.network.neutron [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updated VIF entry in instance network info cache for port 084d10cd-9734-4baf-91b3-892d54084a42. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1186.088968] env[65758]: DEBUG nova.network.neutron [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updating instance_info_cache with network_info: [{"id": "084d10cd-9734-4baf-91b3-892d54084a42", "address": "fa:16:3e:02:7e:09", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap084d10cd-97", "ovs_interfaceid": "084d10cd-9734-4baf-91b3-892d54084a42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1186.090839] env[65758]: DEBUG nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1186.280406] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661343, 'name': CreateVM_Task, 'duration_secs': 0.392601} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.280603] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1186.281132] env[65758]: WARNING neutronclient.v2_0.client [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
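Annotation: the instance_info_cache payload logged above is a list of VIF dicts. A short sketch pulling out the fields of interest (MAC, fixed IPs, device name); the dict literal below is abbreviated from the log entry for instance 1f773924-74ee-4151-81ba-d105ce225289, and the helper name is ours.

# Abbreviated from the network_info logged above.
network_info = [{
    "id": "084d10cd-9734-4baf-91b3-892d54084a42",
    "address": "fa:16:3e:02:7e:09",
    "network": {
        "id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.11", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "details": {"nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af"},
    "devname": "tap084d10cd-97",
}]

def summarize_vifs(network_info):
    """Flatten the cached VIF list into (mac, [fixed ips], devname) tuples."""
    out = []
    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        out.append((vif["address"], ips, vif["devname"]))
    return out

print(summarize_vifs(network_info))
# [('fa:16:3e:02:7e:09', ['192.168.128.11'], 'tap084d10cd-97')]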
[ 1186.281500] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.281652] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.281980] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1186.282263] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-119d2276-b007-4888-8ba3-aca42d676f76 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.288508] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1186.288508] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bb0607-662c-e816-7ac3-4bbe458b21b5" [ 1186.288508] env[65758]: _type = "Task" [ 1186.288508] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.298295] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bb0607-662c-e816-7ac3-4bbe458b21b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.374598] env[65758]: DEBUG oslo_concurrency.lockutils [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.745s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.398547] env[65758]: INFO nova.scheduler.client.report [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleted allocations for instance 8d0419d1-c301-4302-80c1-cd0fce7ccba4 [ 1186.433384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.470663] env[65758]: WARNING openstack [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1186.471124] env[65758]: WARNING openstack [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1186.509081] env[65758]: DEBUG nova.network.neutron [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1186.533268] env[65758]: DEBUG nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1186.583752] env[65758]: WARNING neutronclient.v2_0.client [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
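Annotation: the "Inventory has not changed … based on inventory data" entries above repeat the same per-resource-class dict. Placement treats usable capacity as (total - reserved) * allocation_ratio, with max_unit capping any single allocation, so the figures reported for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 work out as below; this is a quick back-of-the-envelope check, not output from the log.

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192        (48 vCPUs oversubscribed 4x)
# MEMORY_MB: 196078
# DISK_GB: 200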
[ 1186.584435] env[65758]: WARNING openstack [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1186.584810] env[65758]: WARNING openstack [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1186.596059] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Releasing lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.596200] env[65758]: DEBUG nova.compute.manager [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Received event network-vif-plugged-47faf1b5-f52f-4a76-818e-bd682ef2c632 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1186.596379] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Acquiring lock "2014e795-5c62-47c2-9574-2f32ba29638d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.596520] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Lock "2014e795-5c62-47c2-9574-2f32ba29638d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.596679] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Lock "2014e795-5c62-47c2-9574-2f32ba29638d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.596840] env[65758]: DEBUG nova.compute.manager [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] No waiting events found dispatching network-vif-plugged-47faf1b5-f52f-4a76-818e-bd682ef2c632 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1186.597013] env[65758]: WARNING nova.compute.manager [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Received unexpected event network-vif-plugged-47faf1b5-f52f-4a76-818e-bd682ef2c632 for instance with vm_state building and task_state spawning. 
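Annotation: the repeated "Acquiring lock … / acquired by … :: waited / released by … :: held" entries above come from oslo.concurrency's lockutils, which logs wait and hold times around each critical section (per-instance "<uuid>-events" locks, "compute_resources", "refresh_cache-<uuid>"). A minimal sketch of both usage forms follows, assuming only the public lockutils API; the function bodies and the idea that they guard anything real are hypothetical.

from oslo_concurrency import lockutils

# Context-manager form; logs "Acquiring lock ..." / "Acquired lock ..." /
# "Releasing lock ..." DEBUG lines similar to the refresh_cache-* entries above.
def refresh_cache(instance_uuid):
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # hypothetical critical section

# Decorator form; logs the "acquired by ... :: waited" / "released by ... ::
# held" lines with timings, as seen for "compute_resources" above.
@lockutils.synchronized("compute_resources")
def update_usage():
    pass  # hypothetical resource-tracker update

refresh_cache("2014e795-5c62-47c2-9574-2f32ba29638d")
update_usage()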
[ 1186.597270] env[65758]: DEBUG nova.compute.manager [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Received event network-changed-47faf1b5-f52f-4a76-818e-bd682ef2c632 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1186.597400] env[65758]: DEBUG nova.compute.manager [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Refreshing instance network info cache due to event network-changed-47faf1b5-f52f-4a76-818e-bd682ef2c632. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1186.597564] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Acquiring lock "refresh_cache-2014e795-5c62-47c2-9574-2f32ba29638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.612102] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.612378] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.614028] env[65758]: INFO nova.compute.claims [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1186.668621] env[65758]: DEBUG nova.network.neutron [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Updating instance_info_cache with network_info: [{"id": "47faf1b5-f52f-4a76-818e-bd682ef2c632", "address": "fa:16:3e:ef:7a:b4", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47faf1b5-f5", "ovs_interfaceid": "47faf1b5-f52f-4a76-818e-bd682ef2c632", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1186.803639] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bb0607-662c-e816-7ac3-4bbe458b21b5, 'name': SearchDatastore_Task, 'duration_secs': 0.012737} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.804100] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.804392] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1186.804653] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.804869] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.806078] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.806430] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bda5087-fd97-40a6-8459-944fd583b7b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.816763] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.817036] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1186.818054] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da31c55d-4eaf-4fbb-9caa-f9d273da4854 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.825245] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1186.825245] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52df6835-691f-3fcc-da3c-5d7eee716509" [ 1186.825245] env[65758]: _type = "Task" [ 1186.825245] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.838167] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52df6835-691f-3fcc-da3c-5d7eee716509, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.907192] env[65758]: DEBUG oslo_concurrency.lockutils [None req-30eeb330-555c-46e3-acb3-66134b25b67f tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "8d0419d1-c301-4302-80c1-cd0fce7ccba4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.487s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.060467] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.171454] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "refresh_cache-2014e795-5c62-47c2-9574-2f32ba29638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.171887] env[65758]: DEBUG nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Instance network_info: |[{"id": "47faf1b5-f52f-4a76-818e-bd682ef2c632", "address": "fa:16:3e:ef:7a:b4", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47faf1b5-f5", "ovs_interfaceid": "47faf1b5-f52f-4a76-818e-bd682ef2c632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1187.172244] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Acquired lock "refresh_cache-2014e795-5c62-47c2-9574-2f32ba29638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.172440] env[65758]: DEBUG nova.network.neutron [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Refreshing network info cache for port 47faf1b5-f52f-4a76-818e-bd682ef2c632 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1187.173634] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:7a:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47faf1b5-f52f-4a76-818e-bd682ef2c632', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1187.181768] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1187.183231] env[65758]: WARNING neutronclient.v2_0.client [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1187.183901] env[65758]: WARNING openstack [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1187.184288] env[65758]: WARNING openstack [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1187.191707] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1187.192461] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-959ecfd1-c68b-454b-8f63-7f1b12786e1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.214843] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1187.214843] env[65758]: value = "task-4661344" [ 1187.214843] env[65758]: _type = "Task" [ 1187.214843] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.227375] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661344, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.316549] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "62ae50af-ff52-4084-8161-1a650eff5247" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.316824] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "62ae50af-ff52-4084-8161-1a650eff5247" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.317051] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "62ae50af-ff52-4084-8161-1a650eff5247-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.317240] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "62ae50af-ff52-4084-8161-1a650eff5247-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.317406] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "62ae50af-ff52-4084-8161-1a650eff5247-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.319985] env[65758]: INFO nova.compute.manager [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Terminating instance [ 1187.343228] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52df6835-691f-3fcc-da3c-5d7eee716509, 'name': SearchDatastore_Task, 'duration_secs': 0.009757} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.343639] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e33183a-239d-4146-8180-95885ed7cd4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.350330] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1187.350330] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5271aed6-d768-ce2c-5a9b-c51967d06775" [ 1187.350330] env[65758]: _type = "Task" [ 1187.350330] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.359483] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5271aed6-d768-ce2c-5a9b-c51967d06775, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.365029] env[65758]: WARNING neutronclient.v2_0.client [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1187.365727] env[65758]: WARNING openstack [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1187.366114] env[65758]: WARNING openstack [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1187.463792] env[65758]: DEBUG nova.network.neutron [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Updated VIF entry in instance network info cache for port 47faf1b5-f52f-4a76-818e-bd682ef2c632. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1187.464319] env[65758]: DEBUG nova.network.neutron [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Updating instance_info_cache with network_info: [{"id": "47faf1b5-f52f-4a76-818e-bd682ef2c632", "address": "fa:16:3e:ef:7a:b4", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47faf1b5-f5", "ovs_interfaceid": "47faf1b5-f52f-4a76-818e-bd682ef2c632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1187.726502] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661344, 'name': CreateVM_Task, 'duration_secs': 0.340787} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.726693] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.730137] env[65758]: WARNING neutronclient.v2_0.client [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
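The "Invoking <Manager>.<Method>_Task" / "Waiting for the task" / "Task ... progress is N%" / "completed successfully" sequences (SearchDatastore_Task, CreateVM_Task, CopyVirtualDisk_Task above) are oslo.vmware's standard invoke-and-poll pattern. A rough standalone sketch of that pattern, with hypothetical endpoint, credentials and managed-object reference; only the call shape mirrors the log:

from oslo_vmware import api, vim_util

# Hypothetical vCenter endpoint and credentials.
session = api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# vSphere methods ending in _Task return a Task reference immediately...
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # hypothetical moref
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# ...and wait_for_task() polls it (the "progress is N%" lines above) until it
# reaches 'success', raising if the task ends in 'error'.
task_info = session.wait_for_task(task)
print(task_info.state)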
[ 1187.730552] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.730704] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.731049] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1187.731602] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdcaadac-6c68-4745-b9d0-11be5b0add1f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.737891] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1187.737891] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524a642a-a073-3ba6-d452-e2e2b55f58d1" [ 1187.737891] env[65758]: _type = "Task" [ 1187.737891] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.743663] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544ffbbf-f3f0-49b6-a958-3489671bab51 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.749186] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a642a-a073-3ba6-d452-e2e2b55f58d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.754266] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482af1bd-9246-416b-b9da-d7f123c430ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.786337] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435e2a3e-0863-430a-b29d-f7e643586012 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.794904] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fca7cf-2960-49e2-8974-9cb0ad47ca8d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.810307] env[65758]: DEBUG nova.compute.provider_tree [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.823952] env[65758]: DEBUG nova.compute.manager [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1187.824268] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1187.825637] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd01f1db-100c-4990-8e6d-929b6c77ed1c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.834833] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1187.835116] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-434ca5c3-98f8-4087-a392-99e54d00da7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.843711] env[65758]: DEBUG oslo_vmware.api [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1187.843711] env[65758]: value = "task-4661345" [ 1187.843711] env[65758]: _type = "Task" [ 1187.843711] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.852690] env[65758]: DEBUG oslo_vmware.api [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661345, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.863162] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5271aed6-d768-ce2c-5a9b-c51967d06775, 'name': SearchDatastore_Task, 'duration_secs': 0.010778} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.863430] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.863728] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 1f773924-74ee-4151-81ba-d105ce225289/1f773924-74ee-4151-81ba-d105ce225289.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1187.863976] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abdf4f97-0dd2-477a-b8a3-422cee00d099 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.871922] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1187.871922] env[65758]: value = "task-4661346" [ 1187.871922] env[65758]: _type = "Task" [ 1187.871922] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.882526] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661346, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.951576] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14ad7e2-5886-4a98-8269-2b39d1d7ade4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.978247] env[65758]: DEBUG oslo_concurrency.lockutils [req-16482c79-6e1a-41a0-bf6a-f4228a8438c0 req-fc235822-5f4e-49dc-b754-e634783d01cc service nova] Releasing lock "refresh_cache-2014e795-5c62-47c2-9574-2f32ba29638d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.978857] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance 'ba16e0fe-6748-4d14-bb28-a65d63a2274d' progress to 0 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1188.251086] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524a642a-a073-3ba6-d452-e2e2b55f58d1, 'name': SearchDatastore_Task, 'duration_secs': 0.010125} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.251086] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.251343] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.251638] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.251797] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.251984] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1188.252312] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fad2379-436b-4b26-b304-32c4653724fe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.271513] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1188.271740] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1188.272789] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2a1a0be-7005-48ab-88f7-22b126121e9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.281454] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1188.281454] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]521ea799-240a-38a0-6e99-dce2bb48f8d4" [ 1188.281454] env[65758]: _type = "Task" [ 1188.281454] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.292287] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521ea799-240a-38a0-6e99-dce2bb48f8d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.313976] env[65758]: DEBUG nova.scheduler.client.report [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1188.356823] env[65758]: DEBUG oslo_vmware.api [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661345, 'name': PowerOffVM_Task, 'duration_secs': 0.218104} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.357170] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1188.357341] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1188.357637] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0ce0f4b-2bac-4dbb-af23-baf70cb62959 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.383512] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661346, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502303} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.383800] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 1f773924-74ee-4151-81ba-d105ce225289/1f773924-74ee-4151-81ba-d105ce225289.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1188.384023] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1188.384299] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dcd8a8a9-29ad-450d-bb11-93d30d53d3cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.391986] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1188.391986] env[65758]: value = "task-4661348" [ 1188.391986] env[65758]: _type = "Task" [ 1188.391986] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.402456] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661348, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.432969] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1188.433290] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1188.433483] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleting the datastore file [datastore2] 62ae50af-ff52-4084-8161-1a650eff5247 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1188.433803] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8a3216c-6626-43ea-86fc-b4ac0417e941 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.442056] env[65758]: DEBUG oslo_vmware.api [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for the task: (returnval){ [ 1188.442056] env[65758]: value = "task-4661349" [ 1188.442056] env[65758]: _type = "Task" [ 1188.442056] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.450899] env[65758]: DEBUG oslo_vmware.api [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661349, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.485570] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1188.485915] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18404638-3b3d-450b-ac3c-41bec922298e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.494879] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1188.494879] env[65758]: value = "task-4661350" [ 1188.494879] env[65758]: _type = "Task" [ 1188.494879] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.504566] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.792028] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]521ea799-240a-38a0-6e99-dce2bb48f8d4, 'name': SearchDatastore_Task, 'duration_secs': 0.053687} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.792856] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cee7d585-9f2f-47fc-85dc-512370a01e50 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.799018] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1188.799018] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52419d40-350f-b71f-23ba-c02f03aa57d0" [ 1188.799018] env[65758]: _type = "Task" [ 1188.799018] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.807412] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52419d40-350f-b71f-23ba-c02f03aa57d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.819299] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.820401] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.760s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.821932] env[65758]: INFO nova.compute.claims [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.902370] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661348, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069692} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.902370] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1188.903485] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bd5231-ff4a-406f-ae92-927b66e5939c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.926571] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 1f773924-74ee-4151-81ba-d105ce225289/1f773924-74ee-4151-81ba-d105ce225289.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.926912] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83b0b4a0-39e3-4470-bf2d-7900eeda7cbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.947869] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1188.947869] env[65758]: value = "task-4661351" [ 1188.947869] env[65758]: _type = "Task" [ 1188.947869] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.954537] env[65758]: DEBUG oslo_vmware.api [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Task: {'id': task-4661349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164835} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.955256] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1188.955444] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1188.955595] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1188.955829] env[65758]: INFO nova.compute.manager [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1188.956193] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1188.956417] env[65758]: DEBUG nova.compute.manager [-] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1188.956514] env[65758]: DEBUG nova.network.neutron [-] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1188.956782] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
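The Acquiring lock "compute_resources" / acquired ... waited / "released" ... held lines throughout this section are the DEBUG logging that oslo.concurrency's lockutils decorator emits around Nova's resource-tracker claims (for example the 1.760s wait shown a little earlier). A minimal standalone sketch of the same serialization pattern; the prefix and the claim body are illustrative, not Nova's actual code:

from oslo_concurrency import lockutils

# One named lock serializes all resource claims, as in the log above.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Only one caller at a time gets past the decorator; the waited/held
    # durations it logs at DEBUG are what show up in the lines above.
    print('claiming resources for %s' % instance_uuid)

instance_claim('a9550f72-009c-4143-afe2-887727e5c071')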
[ 1188.957444] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1188.957726] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1188.968229] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661351, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.006730] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.008240] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1189.308322] env[65758]: DEBUG nova.compute.manager [req-852dd049-4a7b-4ba9-b0cf-7e2879941263 req-fef3be04-8073-47ec-960e-ca5f0af085df service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Received event network-vif-deleted-4b156aab-9aa2-46c6-8e9f-b9912654dcc0 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1189.308537] env[65758]: INFO nova.compute.manager [req-852dd049-4a7b-4ba9-b0cf-7e2879941263 req-fef3be04-8073-47ec-960e-ca5f0af085df service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Neutron deleted interface 4b156aab-9aa2-46c6-8e9f-b9912654dcc0; detaching it from the instance and deleting it from the info cache [ 1189.308712] env[65758]: DEBUG nova.network.neutron [req-852dd049-4a7b-4ba9-b0cf-7e2879941263 req-fef3be04-8073-47ec-960e-ca5f0af085df service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1189.315064] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52419d40-350f-b71f-23ba-c02f03aa57d0, 'name': SearchDatastore_Task, 'duration_secs': 0.014037} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.315113] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.315352] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2014e795-5c62-47c2-9574-2f32ba29638d/2014e795-5c62-47c2-9574-2f32ba29638d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1189.316015] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9829dfc-4ae6-4183-bd01-47ddd2e8475f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.325952] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "ed17224e-22e4-40b2-af76-49e97ed11543" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.326234] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "ed17224e-22e4-40b2-af76-49e97ed11543" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.329332] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1189.329332] env[65758]: value = "task-4661352" [ 1189.329332] env[65758]: _type = "Task" [ 1189.329332] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.342387] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.460215] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661351, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.478634] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.508101] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.717548] env[65758]: DEBUG nova.network.neutron [-] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1189.812498] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aaa92dc2-e015-4dd0-b1ee-2974da0ae204 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.823503] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c43b3ca-e219-41c2-867a-cc6599cc4423 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.839043] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "ed17224e-22e4-40b2-af76-49e97ed11543" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.512s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.839541] env[65758]: DEBUG nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1189.853740] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510128} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.854030] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2014e795-5c62-47c2-9574-2f32ba29638d/2014e795-5c62-47c2-9574-2f32ba29638d.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1189.854245] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1189.854524] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-faaec82f-eaf0-481e-a2a7-657b384aab03 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.865918] env[65758]: DEBUG nova.compute.manager [req-852dd049-4a7b-4ba9-b0cf-7e2879941263 req-fef3be04-8073-47ec-960e-ca5f0af085df service nova] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Detach interface failed, port_id=4b156aab-9aa2-46c6-8e9f-b9912654dcc0, reason: Instance 62ae50af-ff52-4084-8161-1a650eff5247 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1189.870416] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1189.870416] env[65758]: value = "task-4661353" [ 1189.870416] env[65758]: _type = "Task" [ 1189.870416] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.880207] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.961157] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661351, 'name': ReconfigVM_Task, 'duration_secs': 0.909258} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.961390] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 1f773924-74ee-4151-81ba-d105ce225289/1f773924-74ee-4151-81ba-d105ce225289.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.962044] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da10d409-a399-4b79-a3c5-1b3b39623abb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.968634] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1189.968634] env[65758]: value = "task-4661354" [ 1189.968634] env[65758]: _type = "Task" [ 1189.968634] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.979819] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661354, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.981820] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.997223] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff6e4ae-e39e-48d7-9a78-1d6eeb0c1bdc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.008772] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661350, 'name': PowerOffVM_Task, 'duration_secs': 1.203956} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.010683] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1190.010877] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance 'ba16e0fe-6748-4d14-bb28-a65d63a2274d' progress to 17 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1190.015020] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8c2ac0-6fd0-42c2-817e-971bcd87a388 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.050245] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88781a85-1827-4a98-84e4-00e01615c581 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.059823] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b8a335-c78a-4b0a-8944-8a911a9ea7c2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.075501] env[65758]: DEBUG nova.compute.provider_tree [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.220661] env[65758]: INFO nova.compute.manager [-] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Took 1.26 seconds to deallocate network for instance. [ 1190.344373] env[65758]: DEBUG nova.compute.utils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1190.346153] env[65758]: DEBUG nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1190.346260] env[65758]: DEBUG nova.network.neutron [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1190.346497] env[65758]: WARNING neutronclient.v2_0.client [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1190.346857] env[65758]: WARNING neutronclient.v2_0.client [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1190.347398] env[65758]: WARNING openstack [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1190.347743] env[65758]: WARNING openstack [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1190.381017] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068784} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.381291] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1190.382110] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e50d8a4-731b-4df0-b921-91dbd3d24346 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.405668] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 2014e795-5c62-47c2-9574-2f32ba29638d/2014e795-5c62-47c2-9574-2f32ba29638d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.409197] env[65758]: DEBUG nova.policy [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7baff1b2ed4641b883e2a9a56763a006', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ea01f1ae2924702a6d3e4f9f0999152', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1190.410961] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b453e0d4-eb85-4243-b558-208cce3397e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.431679] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1190.431679] env[65758]: value = "task-4661355" [ 1190.431679] env[65758]: _type = "Task" [ 1190.431679] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.442920] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661355, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.479586] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661354, 'name': Rename_Task, 'duration_secs': 0.152171} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.479874] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1190.480185] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fefa9e9-7065-4c9a-a8e9-748e51bc3388 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.488481] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1190.488481] env[65758]: value = "task-4661356" [ 1190.488481] env[65758]: _type = "Task" [ 1190.488481] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.498054] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661356, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.521701] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1190.522093] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1190.522168] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1190.522290] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1190.522433] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 
tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1190.522572] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1190.522898] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1190.523052] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1190.523223] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1190.523382] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1190.523550] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1190.528780] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d84b67b-fbfb-4f2b-b271-816ce7353075 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.545461] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1190.545461] env[65758]: value = "task-4661357" [ 1190.545461] env[65758]: _type = "Task" [ 1190.545461] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.554280] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661357, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.578606] env[65758]: DEBUG nova.scheduler.client.report [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.726973] env[65758]: DEBUG nova.network.neutron [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Successfully created port: c67cebc7-ffed-46f4-83f9-32b2a75e0b87 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1190.730199] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.856395] env[65758]: DEBUG nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1190.946009] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661355, 'name': ReconfigVM_Task, 'duration_secs': 0.285452} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.946344] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 2014e795-5c62-47c2-9574-2f32ba29638d/2014e795-5c62-47c2-9574-2f32ba29638d.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.947037] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c4cc7a1-51af-4682-b382-b0bb82fb9774 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.954302] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1190.954302] env[65758]: value = "task-4661358" [ 1190.954302] env[65758]: _type = "Task" [ 1190.954302] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.963298] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661358, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.998789] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661356, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.056078] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661357, 'name': ReconfigVM_Task, 'duration_secs': 0.409111} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.056422] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance 'ba16e0fe-6748-4d14-bb28-a65d63a2274d' progress to 33 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1191.084165] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.084700] env[65758]: DEBUG nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1191.087854] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.106s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.088112] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.088277] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1191.088595] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.359s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.088802] env[65758]: DEBUG nova.objects.instance [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lazy-loading 'resources' on Instance uuid 62ae50af-ff52-4084-8161-1a650eff5247 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.090863] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a4487a-6d79-4c06-a9bc-e06e68233acb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.100420] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e11db4-8ebd-4c56-9710-a35ed24cf26e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.119903] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94753624-cf3f-4fc1-a780-88d55a8ae94c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.128528] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cad9b7-d5c8-4b4c-9ff6-fbe6f53703b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.163248] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179103MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1191.163408] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.465162] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661358, 'name': Rename_Task, 'duration_secs': 0.163953} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.465499] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1191.465884] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15d8648a-4576-47bd-ac63-a18e15e75268 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.473198] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1191.473198] env[65758]: value = "task-4661359" [ 1191.473198] env[65758]: _type = "Task" [ 1191.473198] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.481806] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661359, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.499907] env[65758]: DEBUG oslo_vmware.api [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661356, 'name': PowerOnVM_Task, 'duration_secs': 0.666236} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.500235] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1191.500511] env[65758]: INFO nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Took 9.40 seconds to spawn the instance on the hypervisor. 
[ 1191.500739] env[65758]: DEBUG nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1191.501560] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de75331-e8e7-418b-a222-a8e3a24353a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.563525] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1191.563525] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1191.563525] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1191.564442] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1191.564442] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1191.564442] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1191.564442] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 1191.564442] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1191.564442] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1191.564638] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1191.564756] env[65758]: DEBUG nova.virt.hardware [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1191.570191] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1191.571266] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66ab56a0-f58c-45f3-a0ff-57108bcc1b7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.591659] env[65758]: DEBUG nova.compute.utils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1191.594697] env[65758]: DEBUG nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1191.594951] env[65758]: DEBUG nova.network.neutron [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1191.595312] env[65758]: WARNING neutronclient.v2_0.client [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1191.595628] env[65758]: WARNING neutronclient.v2_0.client [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1191.596353] env[65758]: WARNING openstack [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1191.596712] env[65758]: WARNING openstack [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1191.606652] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1191.606652] env[65758]: value = "task-4661360" [ 1191.606652] env[65758]: _type = "Task" [ 1191.606652] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.621047] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661360, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.658051] env[65758]: DEBUG nova.policy [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcb6cf498b804adb971dd7e1722c277b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f32b2100e0824c56ab852e0d1bb37e87', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1191.751733] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ca6a1f-96f6-43dc-bf69-9f347e7297d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.761421] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac67e402-d948-4820-a29e-fbd1bf21bb46 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.803746] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136359fb-acf6-40da-8ee5-f471a4fb22ff {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.814042] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54731a1-9422-455a-bed0-d511c6b77927 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.831492] env[65758]: DEBUG nova.compute.provider_tree [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.868513] env[65758]: DEBUG nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1191.905991] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1191.906304] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1191.906452] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1191.906634] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1191.906771] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1191.906907] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1191.907192] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1191.907390] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1191.907565] env[65758]: DEBUG nova.virt.hardware [None 
req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1191.907721] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1191.907882] env[65758]: DEBUG nova.virt.hardware [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1191.908773] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d72eacf-1421-44d8-b8c4-35855d0531b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.918300] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b691520-a889-40d0-b029-24f610d20a8c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.984948] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661359, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.021620] env[65758]: INFO nova.compute.manager [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Took 15.79 seconds to build instance. [ 1192.066786] env[65758]: DEBUG nova.network.neutron [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Successfully created port: 8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1192.108290] env[65758]: DEBUG nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1192.127507] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661360, 'name': ReconfigVM_Task, 'duration_secs': 0.267} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.127800] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1192.128623] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30706345-ef24-4cf9-841e-d6dc4e31b7af {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.157396] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d/ba16e0fe-6748-4d14-bb28-a65d63a2274d.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.157770] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-142bc8d4-432b-497c-a52d-1bed1a2c47cb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.178657] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1192.178657] env[65758]: value = "task-4661361" [ 1192.178657] env[65758]: _type = "Task" [ 1192.178657] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.188564] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661361, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.277906] env[65758]: DEBUG nova.network.neutron [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Successfully updated port: c67cebc7-ffed-46f4-83f9-32b2a75e0b87 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1192.335938] env[65758]: DEBUG nova.scheduler.client.report [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1192.345022] env[65758]: DEBUG nova.compute.manager [req-207ba95b-960c-4181-9c26-3da6357ddddd req-e1c7d8ae-414c-4b57-a3fe-6a228349b7b1 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Received event network-vif-plugged-c67cebc7-ffed-46f4-83f9-32b2a75e0b87 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1192.345022] env[65758]: DEBUG oslo_concurrency.lockutils [req-207ba95b-960c-4181-9c26-3da6357ddddd req-e1c7d8ae-414c-4b57-a3fe-6a228349b7b1 service nova] Acquiring lock "0c64f9ad-33e1-4792-9b44-b088d77c0383-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.345022] env[65758]: DEBUG oslo_concurrency.lockutils [req-207ba95b-960c-4181-9c26-3da6357ddddd req-e1c7d8ae-414c-4b57-a3fe-6a228349b7b1 service nova] Lock "0c64f9ad-33e1-4792-9b44-b088d77c0383-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.345022] env[65758]: DEBUG oslo_concurrency.lockutils [req-207ba95b-960c-4181-9c26-3da6357ddddd req-e1c7d8ae-414c-4b57-a3fe-6a228349b7b1 service nova] Lock "0c64f9ad-33e1-4792-9b44-b088d77c0383-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.345022] env[65758]: DEBUG nova.compute.manager [req-207ba95b-960c-4181-9c26-3da6357ddddd req-e1c7d8ae-414c-4b57-a3fe-6a228349b7b1 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] No waiting events found dispatching network-vif-plugged-c67cebc7-ffed-46f4-83f9-32b2a75e0b87 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1192.345022] env[65758]: WARNING nova.compute.manager [req-207ba95b-960c-4181-9c26-3da6357ddddd req-e1c7d8ae-414c-4b57-a3fe-6a228349b7b1 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Received unexpected event network-vif-plugged-c67cebc7-ffed-46f4-83f9-32b2a75e0b87 for instance with vm_state building and task_state spawning. 
[ 1192.487996] env[65758]: DEBUG oslo_vmware.api [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661359, 'name': PowerOnVM_Task, 'duration_secs': 0.865683} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.488988] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1192.489339] env[65758]: INFO nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Took 8.15 seconds to spawn the instance on the hypervisor. [ 1192.489930] env[65758]: DEBUG nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1192.490871] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc547f68-546f-446c-9c78-00633c867997 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.525983] env[65758]: DEBUG oslo_concurrency.lockutils [None req-23a4b451-f071-4a39-b080-1e546fd0a9f8 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "1f773924-74ee-4151-81ba-d105ce225289" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.300s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.689627] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661361, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.779687] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "refresh_cache-0c64f9ad-33e1-4792-9b44-b088d77c0383" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.779902] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquired lock "refresh_cache-0c64f9ad-33e1-4792-9b44-b088d77c0383" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.780097] env[65758]: DEBUG nova.network.neutron [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1192.845062] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.848093] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.685s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.871204] env[65758]: INFO nova.scheduler.client.report [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Deleted allocations for instance 62ae50af-ff52-4084-8161-1a650eff5247 [ 1193.015209] env[65758]: INFO nova.compute.manager [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Took 14.59 seconds to build instance. [ 1193.127162] env[65758]: DEBUG nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1193.157923] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1193.158356] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1193.158475] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1193.158593] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1193.158741] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1193.158887] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1193.159109] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1193.159270] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1193.159435] env[65758]: DEBUG 
nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1193.159596] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1193.159767] env[65758]: DEBUG nova.virt.hardware [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1193.160696] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b432aa2-e01c-4829-bf83-0e49217c265f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.170225] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94848b80-5d2e-45c7-8b56-46636dbcbd47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.210985] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661361, 'name': ReconfigVM_Task, 'duration_secs': 0.869382} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.210985] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d/ba16e0fe-6748-4d14-bb28-a65d63a2274d.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1193.210985] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance 'ba16e0fe-6748-4d14-bb28-a65d63a2274d' progress to 50 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1193.338311] env[65758]: WARNING openstack [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1193.338311] env[65758]: WARNING openstack [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1193.349956] env[65758]: DEBUG nova.network.neutron [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1193.381534] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1b0036bd-c44f-467c-abf7-f9eb5969bc06 tempest-AttachInterfacesTestJSON-1838442919 tempest-AttachInterfacesTestJSON-1838442919-project-member] Lock "62ae50af-ff52-4084-8161-1a650eff5247" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.065s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.521111] env[65758]: DEBUG oslo_concurrency.lockutils [None req-759347ab-5796-41ac-989f-3c2f80d3a853 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "2014e795-5c62-47c2-9574-2f32ba29638d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.105s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.544756] env[65758]: WARNING neutronclient.v2_0.client [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1193.545516] env[65758]: WARNING openstack [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1193.545903] env[65758]: WARNING openstack [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1193.620588] env[65758]: DEBUG nova.compute.manager [req-c1edc93e-0a40-406d-b7f6-8009e0af1d0c req-9d2faf9a-c1a1-4623-869c-9bc0312c7fc6 service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Received event network-vif-plugged-8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1193.620819] env[65758]: DEBUG oslo_concurrency.lockutils [req-c1edc93e-0a40-406d-b7f6-8009e0af1d0c req-9d2faf9a-c1a1-4623-869c-9bc0312c7fc6 service nova] Acquiring lock "a9550f72-009c-4143-afe2-887727e5c071-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.621036] env[65758]: DEBUG oslo_concurrency.lockutils [req-c1edc93e-0a40-406d-b7f6-8009e0af1d0c req-9d2faf9a-c1a1-4623-869c-9bc0312c7fc6 service nova] Lock "a9550f72-009c-4143-afe2-887727e5c071-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.621213] env[65758]: DEBUG oslo_concurrency.lockutils 
[req-c1edc93e-0a40-406d-b7f6-8009e0af1d0c req-9d2faf9a-c1a1-4623-869c-9bc0312c7fc6 service nova] Lock "a9550f72-009c-4143-afe2-887727e5c071-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.621378] env[65758]: DEBUG nova.compute.manager [req-c1edc93e-0a40-406d-b7f6-8009e0af1d0c req-9d2faf9a-c1a1-4623-869c-9bc0312c7fc6 service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] No waiting events found dispatching network-vif-plugged-8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1193.621541] env[65758]: WARNING nova.compute.manager [req-c1edc93e-0a40-406d-b7f6-8009e0af1d0c req-9d2faf9a-c1a1-4623-869c-9bc0312c7fc6 service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Received unexpected event network-vif-plugged-8bba2462-60e3-4a60-9eac-f9e7a6e5a898 for instance with vm_state building and task_state spawning. [ 1193.675805] env[65758]: DEBUG nova.network.neutron [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Successfully updated port: 8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1193.685457] env[65758]: DEBUG nova.network.neutron [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Updating instance_info_cache with network_info: [{"id": "c67cebc7-ffed-46f4-83f9-32b2a75e0b87", "address": "fa:16:3e:18:ce:25", "network": {"id": "90eeec78-df97-4165-85b4-b66a7c8eb8dc", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-429112371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea01f1ae2924702a6d3e4f9f0999152", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7869cc8e-e58f-4fd6-88d7-85a18e43cd3a", "external-id": "nsx-vlan-transportzone-927", "segmentation_id": 927, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc67cebc7-ff", "ovs_interfaceid": "c67cebc7-ffed-46f4-83f9-32b2a75e0b87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1193.711770] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce599be-7976-4db2-b8bd-8dd60c3b1c37 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.737991] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8620108f-2cf2-4d55-9077-c96d48f4a4ff {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.762733] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance 'ba16e0fe-6748-4d14-bb28-a65d63a2274d' progress to 67 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1193.860344] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Applying migration context for instance ba16e0fe-6748-4d14-bb28-a65d63a2274d as it has an incoming, in-progress migration 75fd58b3-37ba-47e0-bee0-bec414fd08b6. Migration status is migrating {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1193.861406] env[65758]: INFO nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating resource usage from migration 75fd58b3-37ba-47e0-bee0-bec414fd08b6 [ 1193.883113] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance bc10286b-195f-48a2-b16c-f8f925ec7a2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1193.883282] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance cdc1cfab-4f75-4caf-a4ee-8197af083353 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1193.883413] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 1f773924-74ee-4151-81ba-d105ce225289 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1193.883591] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 2014e795-5c62-47c2-9574-2f32ba29638d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1193.883752] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Migration 75fd58b3-37ba-47e0-bee0-bec414fd08b6 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1745}} [ 1193.883892] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance ba16e0fe-6748-4d14-bb28-a65d63a2274d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1193.884020] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 0c64f9ad-33e1-4792-9b44-b088d77c0383 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1193.884135] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a9550f72-009c-4143-afe2-887727e5c071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1193.884332] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1193.884479] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=100GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '7', 'num_vm_active': '5', 'num_task_resize_migrating': '1', 'num_os_type_None': '7', 'num_proj_4095654557a34bb0907071aedb3bb678': '1', 'io_workload': '3', 'num_task_None': '4', 'num_proj_e2440f1694fe4b87a9827f6653ff2e4c': '1', 'num_proj_82bfbb5ee6714c9aa5119cb714d28ce2': '2', 'num_proj_c4c2ab2b80c04c38bfb4c7cafac87fe6': '1', 'num_vm_building': '2', 'num_task_spawning': '2', 'num_proj_3ea01f1ae2924702a6d3e4f9f0999152': '1', 'num_proj_f32b2100e0824c56ab852e0d1bb37e87': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1194.009584] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da065ae7-a7e3-40ed-a841-8a3c374108b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.018139] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb4ac3a-c008-4ac4-95c8-0368ec32e064 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.052602] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05753583-676c-44e1-888d-6f95a36e8de6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.062160] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539f45a4-1dd8-48d8-9a03-c5cb84ddd822 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.077929] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.098460] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "2014e795-5c62-47c2-9574-2f32ba29638d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.098702] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "2014e795-5c62-47c2-9574-2f32ba29638d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.098903] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "2014e795-5c62-47c2-9574-2f32ba29638d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.099237] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "2014e795-5c62-47c2-9574-2f32ba29638d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.099314] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "2014e795-5c62-47c2-9574-2f32ba29638d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.101417] env[65758]: INFO nova.compute.manager [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Terminating instance [ 1194.178816] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.179040] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.179236] env[65758]: DEBUG nova.network.neutron [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Building network info cache for instance {{(pid=65758) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2104}} [ 1194.188023] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Releasing lock "refresh_cache-0c64f9ad-33e1-4792-9b44-b088d77c0383" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.188615] env[65758]: DEBUG nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Instance network_info: |[{"id": "c67cebc7-ffed-46f4-83f9-32b2a75e0b87", "address": "fa:16:3e:18:ce:25", "network": {"id": "90eeec78-df97-4165-85b4-b66a7c8eb8dc", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-429112371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea01f1ae2924702a6d3e4f9f0999152", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7869cc8e-e58f-4fd6-88d7-85a18e43cd3a", "external-id": "nsx-vlan-transportzone-927", "segmentation_id": 927, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc67cebc7-ff", "ovs_interfaceid": "c67cebc7-ffed-46f4-83f9-32b2a75e0b87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1194.189501] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:ce:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7869cc8e-e58f-4fd6-88d7-85a18e43cd3a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c67cebc7-ffed-46f4-83f9-32b2a75e0b87', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1194.197698] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Creating folder: Project (3ea01f1ae2924702a6d3e4f9f0999152). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1194.198302] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2900c319-e62e-4afd-9ccd-e63e8185572d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.211752] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Created folder: Project (3ea01f1ae2924702a6d3e4f9f0999152) in parent group-v909763. 
[ 1194.211976] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Creating folder: Instances. Parent ref: group-v910069. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1194.212268] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba738910-5b38-4baf-ab9c-f4faf0eabd9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.223961] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Created folder: Instances in parent group-v910069. [ 1194.224259] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1194.224466] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1194.224728] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47adfa9d-a8ec-40e1-b59e-ac2b8f455a06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.245182] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1194.245182] env[65758]: value = "task-4661364" [ 1194.245182] env[65758]: _type = "Task" [ 1194.245182] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.254023] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661364, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.379071] env[65758]: DEBUG nova.compute.manager [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Received event network-changed-c67cebc7-ffed-46f4-83f9-32b2a75e0b87 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1194.379412] env[65758]: DEBUG nova.compute.manager [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Refreshing instance network info cache due to event network-changed-c67cebc7-ffed-46f4-83f9-32b2a75e0b87. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1194.379731] env[65758]: DEBUG oslo_concurrency.lockutils [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Acquiring lock "refresh_cache-0c64f9ad-33e1-4792-9b44-b088d77c0383" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.379962] env[65758]: DEBUG oslo_concurrency.lockutils [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Acquired lock "refresh_cache-0c64f9ad-33e1-4792-9b44-b088d77c0383" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.380235] env[65758]: DEBUG nova.network.neutron [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Refreshing network info cache for port c67cebc7-ffed-46f4-83f9-32b2a75e0b87 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1194.582913] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1194.606254] env[65758]: DEBUG nova.compute.manager [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1194.606794] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1194.608025] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195f0f13-9058-40b8-84c6-ead49e25eb6e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.618993] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.619105] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aad577d5-d6f6-4187-ad97-8469e65b3905 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.629244] env[65758]: DEBUG oslo_vmware.api [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1194.629244] env[65758]: value = "task-4661365" [ 1194.629244] env[65758]: _type = "Task" [ 1194.629244] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.639908] env[65758]: DEBUG oslo_vmware.api [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.682346] env[65758]: WARNING openstack [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1194.682851] env[65758]: WARNING openstack [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1194.723370] env[65758]: DEBUG nova.network.neutron [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1194.757588] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661364, 'name': CreateVM_Task, 'duration_secs': 0.386098} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.757799] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1194.758331] env[65758]: WARNING neutronclient.v2_0.client [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1194.795059] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.795889] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.795889] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1194.796824] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d3f3daf-9463-4ab6-b01a-736d789cb015 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.802601] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1194.802601] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52502ca4-c155-6720-592f-45afa77ae4b7" [ 1194.802601] env[65758]: _type = "Task" [ 1194.802601] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.812665] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52502ca4-c155-6720-592f-45afa77ae4b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.872848] env[65758]: WARNING neutronclient.v2_0.client [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1194.873698] env[65758]: WARNING openstack [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1194.874120] env[65758]: WARNING openstack [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1194.886362] env[65758]: WARNING neutronclient.v2_0.client [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1194.887023] env[65758]: WARNING openstack [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1194.887635] env[65758]: WARNING openstack [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1195.011943] env[65758]: DEBUG nova.network.neutron [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Updating instance_info_cache with network_info: [{"id": "8bba2462-60e3-4a60-9eac-f9e7a6e5a898", "address": "fa:16:3e:fc:7a:8c", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bba2462-60", "ovs_interfaceid": "8bba2462-60e3-4a60-9eac-f9e7a6e5a898", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1195.089209] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1195.089598] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.241s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.127935] env[65758]: WARNING neutronclient.v2_0.client [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1195.128681] env[65758]: WARNING openstack [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1195.129252] env[65758]: WARNING openstack [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1195.148347] env[65758]: DEBUG oslo_vmware.api [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661365, 'name': PowerOffVM_Task, 'duration_secs': 0.21382} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.148347] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1195.148578] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1195.148789] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2963c7d6-dbfa-4c05-bd4a-460d17757e2d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.228774] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1195.229013] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1195.229216] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleting the datastore file [datastore2] 2014e795-5c62-47c2-9574-2f32ba29638d {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1195.229491] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2eb0dc07-d0d3-48b1-816b-bcb6e4f39992 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.234214] env[65758]: DEBUG nova.network.neutron [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Updated VIF entry in instance network info cache for port c67cebc7-ffed-46f4-83f9-32b2a75e0b87. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1195.234577] env[65758]: DEBUG nova.network.neutron [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Updating instance_info_cache with network_info: [{"id": "c67cebc7-ffed-46f4-83f9-32b2a75e0b87", "address": "fa:16:3e:18:ce:25", "network": {"id": "90eeec78-df97-4165-85b4-b66a7c8eb8dc", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-429112371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ea01f1ae2924702a6d3e4f9f0999152", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7869cc8e-e58f-4fd6-88d7-85a18e43cd3a", "external-id": "nsx-vlan-transportzone-927", "segmentation_id": 927, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc67cebc7-ff", "ovs_interfaceid": "c67cebc7-ffed-46f4-83f9-32b2a75e0b87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1195.241778] env[65758]: DEBUG oslo_vmware.api [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1195.241778] env[65758]: value = "task-4661367" [ 1195.241778] env[65758]: _type = "Task" [ 1195.241778] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.253882] env[65758]: DEBUG oslo_vmware.api [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661367, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.318452] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52502ca4-c155-6720-592f-45afa77ae4b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011724} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.318858] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.319119] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1195.319363] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.319680] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.319936] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1195.320291] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-502834dc-a115-4147-a0c1-1f00f2f4dbf6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.330943] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1195.331240] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1195.332205] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a012763c-5199-4b07-9a48-b14f3ac95539 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.339122] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1195.339122] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52f98669-6917-64e0-d411-4b0cb54fd13a" [ 1195.339122] env[65758]: _type = "Task" [ 1195.339122] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.348751] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f98669-6917-64e0-d411-4b0cb54fd13a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.399503] env[65758]: WARNING neutronclient.v2_0.client [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1195.399949] env[65758]: WARNING neutronclient.v2_0.client [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
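[editor's note] The 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' entries around the "[datastore2] devstack-image-cache_base/....vmdk" path above come from oslo.concurrency's lockutils. A minimal sketch of that pattern follows; the lock name and critical-section body are illustrative, not Nova's actual code.

from oslo_concurrency import lockutils

# Any string works as a lock name; this mirrors the lock target seen in the log.
IMAGE_CACHE_LOCK = '[datastore2] devstack-image-cache_base/<image-id>.vmdk'

with lockutils.lock(IMAGE_CACHE_LOCK):
    # Critical section: only one thread/greenthread in this process may work on
    # the cached image file (search, copy, extend) at a time, which is what the
    # Acquiring/Acquired/Releasing lock lines above record.
    pass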
[ 1195.440148] env[65758]: DEBUG nova.network.neutron [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Port b91df992-11ae-4d37-af24-380860864b45 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 1195.515204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.515638] env[65758]: DEBUG nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Instance network_info: |[{"id": "8bba2462-60e3-4a60-9eac-f9e7a6e5a898", "address": "fa:16:3e:fc:7a:8c", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bba2462-60", "ovs_interfaceid": "8bba2462-60e3-4a60-9eac-f9e7a6e5a898", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1195.516123] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:7a:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc16c915-cff1-4faa-a529-9773ee9bab7e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8bba2462-60e3-4a60-9eac-f9e7a6e5a898', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1195.523806] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Creating folder: Project (f32b2100e0824c56ab852e0d1bb37e87). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1195.524475] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f40a3de-1785-41db-957f-34d0c5f88142 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.539114] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Created folder: Project (f32b2100e0824c56ab852e0d1bb37e87) in parent group-v909763. [ 1195.539324] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Creating folder: Instances. Parent ref: group-v910072. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1195.539585] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f807dfc5-6bac-4070-b65a-7e2322913b1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.551856] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Created folder: Instances in parent group-v910072. [ 1195.552395] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1195.552679] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9550f72-009c-4143-afe2-887727e5c071] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1195.553702] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e3882f8-8e9b-4cb7-9468-76a76cd579f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.574080] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1195.574080] env[65758]: value = "task-4661370" [ 1195.574080] env[65758]: _type = "Task" [ 1195.574080] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.582804] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661370, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.671596] env[65758]: DEBUG nova.compute.manager [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Received event network-changed-8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1195.671596] env[65758]: DEBUG nova.compute.manager [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Refreshing instance network info cache due to event network-changed-8bba2462-60e3-4a60-9eac-f9e7a6e5a898. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1195.671786] env[65758]: DEBUG oslo_concurrency.lockutils [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] Acquiring lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.671927] env[65758]: DEBUG oslo_concurrency.lockutils [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] Acquired lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.672424] env[65758]: DEBUG nova.network.neutron [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Refreshing network info cache for port 8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1195.738576] env[65758]: DEBUG oslo_concurrency.lockutils [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Releasing lock "refresh_cache-0c64f9ad-33e1-4792-9b44-b088d77c0383" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.738916] env[65758]: DEBUG nova.compute.manager [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Received event network-changed-084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1195.739204] env[65758]: DEBUG nova.compute.manager [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Refreshing instance network info cache due to event network-changed-084d10cd-9734-4baf-91b3-892d54084a42. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1195.739371] env[65758]: DEBUG oslo_concurrency.lockutils [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Acquiring lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.739532] env[65758]: DEBUG oslo_concurrency.lockutils [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Acquired lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.740148] env[65758]: DEBUG nova.network.neutron [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Refreshing network info cache for port 084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1195.752844] env[65758]: DEBUG oslo_vmware.api [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.753362] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1195.753362] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1195.753362] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1195.753961] env[65758]: INFO nova.compute.manager [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1195.753961] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1195.753961] env[65758]: DEBUG nova.compute.manager [-] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1195.754080] env[65758]: DEBUG nova.network.neutron [-] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1195.754781] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1195.754959] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1195.755281] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1195.793813] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1195.850971] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52f98669-6917-64e0-d411-4b0cb54fd13a, 'name': SearchDatastore_Task, 'duration_secs': 0.012353} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.851929] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81e73115-a9b9-4413-a846-72d3e5667860 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.858726] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1195.858726] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae065b-8ff9-a4be-60ea-d248a1fad58b" [ 1195.858726] env[65758]: _type = "Task" [ 1195.858726] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.867886] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae065b-8ff9-a4be-60ea-d248a1fad58b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.086245] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661370, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.091076] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.091294] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.091445] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.091658] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.091829] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.091968] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.092129] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.092262] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1196.175978] env[65758]: WARNING neutronclient.v2_0.client [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
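[editor's note] The block of 'Running periodic task ComputeManager._*' entries above is emitted by the oslo.service periodic-task machinery. Below is a minimal, self-contained sketch of how such tasks are declared and driven; the manager class, task body, and 60-second spacing are illustrative assumptions, not Nova's implementation.

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class ExampleManager(periodic_task.PeriodicTasks):
    """Toy stand-in for a manager whose periodic tasks show up in the log."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _poll_something(self, context):
        # Placeholder body; the real ComputeManager tasks poll rebooting and
        # rescued instances, unconfirmed resizes, volume usage, and so on.
        print('periodic task ran')


mgr = ExampleManager()
# One polling pass; the service normally calls this in a loop and sleeps for
# the interval it returns, producing the "Running periodic task ..." lines.
mgr.run_periodic_tasks(context=None)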
[ 1196.176698] env[65758]: WARNING openstack [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1196.177176] env[65758]: WARNING openstack [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1196.243050] env[65758]: WARNING neutronclient.v2_0.client [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1196.243863] env[65758]: WARNING openstack [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1196.244172] env[65758]: WARNING openstack [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1196.351417] env[65758]: WARNING neutronclient.v2_0.client [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1196.352229] env[65758]: WARNING openstack [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1196.352498] env[65758]: WARNING openstack [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1196.371046] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ae065b-8ff9-a4be-60ea-d248a1fad58b, 'name': SearchDatastore_Task, 'duration_secs': 0.010452} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.371340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.371621] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 0c64f9ad-33e1-4792-9b44-b088d77c0383/0c64f9ad-33e1-4792-9b44-b088d77c0383.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1196.371898] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-486be729-ed30-4472-b8ba-d5228ab73d4b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.380437] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1196.380437] env[65758]: value = "task-4661371" [ 1196.380437] env[65758]: _type = "Task" [ 1196.380437] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.391268] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661371, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.432346] env[65758]: DEBUG nova.compute.manager [req-fa00135d-2a52-4a11-accc-0d046db4b497 req-cf7c1a1c-108c-4306-9822-a15fd9b76095 service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Received event network-vif-deleted-47faf1b5-f52f-4a76-818e-bd682ef2c632 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1196.432547] env[65758]: INFO nova.compute.manager [req-fa00135d-2a52-4a11-accc-0d046db4b497 req-cf7c1a1c-108c-4306-9822-a15fd9b76095 service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Neutron deleted interface 47faf1b5-f52f-4a76-818e-bd682ef2c632; detaching it from the instance and deleting it from the info cache [ 1196.432726] env[65758]: DEBUG nova.network.neutron [req-fa00135d-2a52-4a11-accc-0d046db4b497 req-cf7c1a1c-108c-4306-9822-a15fd9b76095 service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1196.472226] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.472508] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.472685] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.501501] env[65758]: DEBUG nova.network.neutron [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Updated VIF entry in instance network info cache for port 8bba2462-60e3-4a60-9eac-f9e7a6e5a898. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1196.502272] env[65758]: DEBUG nova.network.neutron [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Updating instance_info_cache with network_info: [{"id": "8bba2462-60e3-4a60-9eac-f9e7a6e5a898", "address": "fa:16:3e:fc:7a:8c", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bba2462-60", "ovs_interfaceid": "8bba2462-60e3-4a60-9eac-f9e7a6e5a898", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1196.504921] env[65758]: WARNING neutronclient.v2_0.client [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1196.505661] env[65758]: WARNING openstack [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1196.506058] env[65758]: WARNING openstack [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1196.515944] env[65758]: DEBUG oslo_concurrency.lockutils [req-8e480d43-fa0f-4467-9a5e-fb1344886d2d req-8dd339d6-971e-4181-b0df-badea8c5857b service nova] Releasing lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.525604] env[65758]: DEBUG nova.network.neutron [-] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1196.585183] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661370, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.596662] env[65758]: DEBUG nova.network.neutron [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updated VIF entry in instance network info cache for port 084d10cd-9734-4baf-91b3-892d54084a42. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1196.597070] env[65758]: DEBUG nova.network.neutron [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updating instance_info_cache with network_info: [{"id": "084d10cd-9734-4baf-91b3-892d54084a42", "address": "fa:16:3e:02:7e:09", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap084d10cd-97", "ovs_interfaceid": "084d10cd-9734-4baf-91b3-892d54084a42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1196.895456] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661371, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491922} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.895456] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 0c64f9ad-33e1-4792-9b44-b088d77c0383/0c64f9ad-33e1-4792-9b44-b088d77c0383.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1196.895456] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1196.895456] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea9e09e0-4b87-48f7-b3dd-bb04079d4eb9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.906127] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1196.906127] env[65758]: value = "task-4661372" [ 1196.906127] env[65758]: _type = "Task" [ 1196.906127] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.917850] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661372, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.936073] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ca3282d-edf9-485c-b5c8-4fdddbe2bc7f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.948247] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fab80e-23f7-4647-9ddc-8b392261eed3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.982600] env[65758]: DEBUG nova.compute.manager [req-fa00135d-2a52-4a11-accc-0d046db4b497 req-cf7c1a1c-108c-4306-9822-a15fd9b76095 service nova] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Detach interface failed, port_id=47faf1b5-f52f-4a76-818e-bd682ef2c632, reason: Instance 2014e795-5c62-47c2-9574-2f32ba29638d could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1197.028977] env[65758]: INFO nova.compute.manager [-] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Took 1.27 seconds to deallocate network for instance. [ 1197.089230] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661370, 'name': CreateVM_Task, 'duration_secs': 1.40265} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.089434] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9550f72-009c-4143-afe2-887727e5c071] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1197.089967] env[65758]: WARNING neutronclient.v2_0.client [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1197.090353] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.090503] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.090830] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1197.091429] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c139d7f2-46a9-412f-b122-e7dce1418b41 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.098055] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1197.098055] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5225a2c6-b3dc-d6fd-ad6d-f60e426d4e82" [ 1197.098055] env[65758]: _type = "Task" [ 1197.098055] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.102281] env[65758]: DEBUG oslo_concurrency.lockutils [req-9dd9489f-1460-4050-92a8-990a26c70774 req-943b5f1a-322f-414b-b767-734d711d6269 service nova] Releasing lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.112157] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5225a2c6-b3dc-d6fd-ad6d-f60e426d4e82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.421583] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661372, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075136} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.421927] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1197.423067] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13691241-9201-48f7-a3c9-db96d751068c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.450853] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 0c64f9ad-33e1-4792-9b44-b088d77c0383/0c64f9ad-33e1-4792-9b44-b088d77c0383.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.451214] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff111d92-a668-4e51-8deb-1ec92e06d7be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.471943] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1197.471943] env[65758]: value = "task-4661373" [ 1197.471943] env[65758]: _type = "Task" [ 1197.471943] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.480793] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661373, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.485414] env[65758]: WARNING neutronclient.v2_0.client [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
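[editor's note] The repeated 'Waiting for the task' / 'progress is N%' / 'completed successfully' entries reflect oslo.vmware's task polling. A rough sketch of that call pattern is below; the vCenter endpoint, credentials, and VM reference are placeholders, and the snippet only runs against a reachable vCenter.

from oslo_vmware import api

# Placeholder endpoint and credentials; session creation contacts vCenter.
session = api.VMwareAPISession(
    'vcenter.example.org', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # managed-object reference of the VM, obtained elsewhere (placeholder)

# Asynchronous vSphere methods return a task reference; wait_for_task() polls it
# (the "Task: {...} progress is N%" lines) until success or raises on failure.
task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task_ref)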
[ 1197.519607] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.519874] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.520126] env[65758]: DEBUG nova.network.neutron [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1197.534793] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.535145] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.535429] env[65758]: DEBUG nova.objects.instance [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lazy-loading 'resources' on Instance uuid 2014e795-5c62-47c2-9574-2f32ba29638d {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.610952] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5225a2c6-b3dc-d6fd-ad6d-f60e426d4e82, 'name': SearchDatastore_Task, 'duration_secs': 0.024941} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.611790] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.611790] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1197.612549] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.612549] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.612726] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1197.613066] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e831fc6-19f3-43ea-8823-fb774a55dd61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.624824] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1197.625211] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1197.626363] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86c0e765-75f7-4520-bfc5-8a266fd4ef34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.634154] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1197.634154] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]522c75bd-b958-f988-d838-0954be28ce74" [ 1197.634154] env[65758]: _type = "Task" [ 1197.634154] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.647294] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522c75bd-b958-f988-d838-0954be28ce74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.983106] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661373, 'name': ReconfigVM_Task, 'duration_secs': 0.317177} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.983553] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 0c64f9ad-33e1-4792-9b44-b088d77c0383/0c64f9ad-33e1-4792-9b44-b088d77c0383.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1197.984203] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59d466a2-1854-45d8-a1e9-7fa171c1ae51 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.991052] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1197.991052] env[65758]: value = "task-4661374" [ 1197.991052] env[65758]: _type = "Task" [ 1197.991052] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.000572] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661374, 'name': Rename_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.026020] env[65758]: WARNING neutronclient.v2_0.client [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1198.026645] env[65758]: WARNING openstack [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1198.026879] env[65758]: WARNING openstack [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1198.147277] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]522c75bd-b958-f988-d838-0954be28ce74, 'name': SearchDatastore_Task, 'duration_secs': 0.011518} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.152454] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-776f5eef-abad-433f-9f1f-f0e7112e57a1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.161326] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1198.161326] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52579709-b03b-1c48-f9df-852304a3e549" [ 1198.161326] env[65758]: _type = "Task" [ 1198.161326] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.176197] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52579709-b03b-1c48-f9df-852304a3e549, 'name': SearchDatastore_Task, 'duration_secs': 0.011092} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.176743] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.176983] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a9550f72-009c-4143-afe2-887727e5c071/a9550f72-009c-4143-afe2-887727e5c071.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1198.177279] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d922ef0-86f0-427c-9a35-d113e64dade4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.185195] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1198.185195] env[65758]: value = "task-4661375" [ 1198.185195] env[65758]: _type = "Task" [ 1198.185195] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.198206] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661375, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.203289] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb23b53-8952-4e10-a51f-1f2627262cbd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.220357] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5614cb-9b99-4179-957f-3e79863ddeab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.256895] env[65758]: WARNING neutronclient.v2_0.client [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
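Annotation: the repeated "Disabling service 'block-storage' / 'key-manager'" warnings above are the SDK reacting to oslo_config.cfg.NoSuchOptError: it asks the loaded configuration for valid_interfaces in the [cinder] and [barbican] groups, but no such option is registered there. A minimal, illustrative reproduction of that error with oslo.config follows; this is a sketch only, not Nova's own option-registration code.

    # Minimal reproduction of oslo_config.cfg.NoSuchOptError, as seen in the
    # "Disabling service ..." warnings above. Illustrative only; not Nova's
    # actual option registration.
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_group(cfg.OptGroup('cinder'))

    try:
        # 'valid_interfaces' was never registered in the [cinder] group,
        # so attribute access raises NoSuchOptError.
        conf.cinder.valid_interfaces
    except cfg.NoSuchOptError as exc:
        print(exc)  # -> no such option valid_interfaces in group [cinder]

    # Registering the option (modelled here as a list of interface names)
    # makes the same lookup succeed and return the default.
    conf.register_opts(
        [cfg.ListOpt('valid_interfaces', default=['internal', 'public'])],
        group='cinder')
    print(conf.cinder.valid_interfaces)  # -> ['internal', 'public']
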
[ 1198.257854] env[65758]: WARNING openstack [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1198.258340] env[65758]: WARNING openstack [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1198.266807] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9b5595-3af4-4bd2-b67d-99d3468a5941 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.275621] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f276d32-8835-4c0c-b386-52b6d48cfb87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.291849] env[65758]: DEBUG nova.compute.provider_tree [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.433170] env[65758]: DEBUG nova.network.neutron [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [{"id": "b91df992-11ae-4d37-af24-380860864b45", "address": "fa:16:3e:13:ee:a2", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb91df992-11", "ovs_interfaceid": "b91df992-11ae-4d37-af24-380860864b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1198.503710] env[65758]: DEBUG 
oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661374, 'name': Rename_Task, 'duration_secs': 0.168385} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.504105] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1198.504445] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83700c09-3213-4bb8-b41f-6c98bde0af89 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.513667] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1198.513667] env[65758]: value = "task-4661376" [ 1198.513667] env[65758]: _type = "Task" [ 1198.513667] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.523848] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661376, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.695491] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661375, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.794904] env[65758]: DEBUG nova.scheduler.client.report [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1198.936507] env[65758]: DEBUG oslo_concurrency.lockutils [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.025513] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661376, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.196338] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661375, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527545} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.196708] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a9550f72-009c-4143-afe2-887727e5c071/a9550f72-009c-4143-afe2-887727e5c071.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1199.196956] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1199.197353] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93301984-247f-4100-8465-ccb9ca8fb155 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.205331] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1199.205331] env[65758]: value = "task-4661377" [ 1199.205331] env[65758]: _type = "Task" [ 1199.205331] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.217417] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661377, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.303631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.768s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.345137] env[65758]: INFO nova.scheduler.client.report [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted allocations for instance 2014e795-5c62-47c2-9574-2f32ba29638d [ 1199.450378] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b2703c-880a-4fcc-b455-eb3cbb56bb80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.459466] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed01968a-2770-4103-b802-e5a902367c5e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.527076] env[65758]: DEBUG oslo_vmware.api [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661376, 'name': PowerOnVM_Task, 'duration_secs': 0.90312} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.527415] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1199.528240] env[65758]: INFO nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Took 7.66 seconds to spawn the instance on the hypervisor. [ 1199.528240] env[65758]: DEBUG nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1199.528784] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e768168d-3bec-4fb9-acdd-db1a0ed9895b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.716039] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661377, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095698} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.716039] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1199.716609] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0e7f92-72fa-49cd-8131-a9022601b263 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.739137] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] a9550f72-009c-4143-afe2-887727e5c071/a9550f72-009c-4143-afe2-887727e5c071.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1199.739451] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44eeaae0-caa7-4b45-8d3f-18e620570d71 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.760069] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1199.760069] env[65758]: value = "task-4661378" [ 1199.760069] env[65758]: _type = "Task" [ 1199.760069] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.769734] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661378, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.855806] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5fa912cd-a2f6-45c9-8b04-65e53afd3de4 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "2014e795-5c62-47c2-9574-2f32ba29638d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.757s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.056206] env[65758]: INFO nova.compute.manager [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Took 13.46 seconds to build instance. 
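Annotation: the "Inventory has not changed for provider ..." record a few entries above carries the full inventory payload the resource tracker reports to Placement. Effective schedulable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping what a single instance may consume (16 VCPU, 65530 MB, 96 GB here). A small sketch of that arithmetic over the logged dictionary; the data is copied from the log, the loop itself is only illustrative.

    # Effective capacity implied by the inventory payload logged above:
    # capacity = (total - reserved) * allocation_ratio, per resource class.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")
    # VCPU: 192        (48 vCPUs overcommitted 4:1)
    # MEMORY_MB: 196078
    # DISK_GB: 200
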
[ 1200.061501] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "d1918f85-d122-4a84-88b3-f038e8c1149e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.061750] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "d1918f85-d122-4a84-88b3-f038e8c1149e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.272433] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661378, 'name': ReconfigVM_Task, 'duration_secs': 0.292599} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.273105] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Reconfigured VM instance instance-00000070 to attach disk [datastore2] a9550f72-009c-4143-afe2-887727e5c071/a9550f72-009c-4143-afe2-887727e5c071.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1200.273844] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28cba3a8-5736-4304-9d8c-316c4c96e8c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.281857] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1200.281857] env[65758]: value = "task-4661379" [ 1200.281857] env[65758]: _type = "Task" [ 1200.281857] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.290882] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661379, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.562047] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5e5c7e8a-5a36-4581-a93e-1787a4078f69 tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "0c64f9ad-33e1-4792-9b44-b088d77c0383" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.973s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.564725] env[65758]: DEBUG nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1200.593972] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "0c64f9ad-33e1-4792-9b44-b088d77c0383" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.594582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "0c64f9ad-33e1-4792-9b44-b088d77c0383" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.594582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "0c64f9ad-33e1-4792-9b44-b088d77c0383-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.594789] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "0c64f9ad-33e1-4792-9b44-b088d77c0383-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.594789] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "0c64f9ad-33e1-4792-9b44-b088d77c0383-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.597210] env[65758]: INFO nova.compute.manager [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Terminating instance [ 1200.618830] env[65758]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ba0128-9b1f-4339-86b8-0d12e574dc87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.647943] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ef090e-e06c-49e3-9983-b577ee06023f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.656987] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance 'ba16e0fe-6748-4d14-bb28-a65d63a2274d' progress to 83 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1200.792974] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661379, 'name': Rename_Task, 'duration_secs': 0.144447} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.793289] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1200.793552] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c9486a9-62e9-4ff2-962c-0ad273a46d37 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.801079] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1200.801079] env[65758]: value = "task-4661380" [ 1200.801079] env[65758]: _type = "Task" [ 1200.801079] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.810202] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661380, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.086853] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.087151] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.088817] env[65758]: INFO nova.compute.claims [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1201.101671] env[65758]: DEBUG nova.compute.manager [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1201.102234] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.103044] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc63c726-7ff7-43f1-93df-e279f6ed7ef8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.112018] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.112339] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d33bbb6d-ae50-43f8-8687-1ebbedacae37 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.119448] env[65758]: DEBUG oslo_vmware.api [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1201.119448] env[65758]: value = "task-4661381" [ 1201.119448] env[65758]: _type = "Task" [ 1201.119448] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.129412] env[65758]: DEBUG oslo_vmware.api [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.163401] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1201.163632] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1bac239-0483-4848-a6a1-08b323e9a4a2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.173041] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1201.173041] env[65758]: value = "task-4661382" [ 1201.173041] env[65758]: _type = "Task" [ 1201.173041] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.183069] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661382, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.320106] env[65758]: DEBUG oslo_vmware.api [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661380, 'name': PowerOnVM_Task, 'duration_secs': 0.479867} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.320402] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1201.320601] env[65758]: INFO nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Took 8.19 seconds to spawn the instance on the hypervisor. 
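Annotation: the "Acquiring lock ... by ..." / "acquired ... waited Ns" / "released ... held Ns" triplets running through this log come from oslo.concurrency's lockutils, used both as a decorator (as around the resource tracker's claim and usage updates on "compute_resources") and as an explicit acquire/release pair (as for the refresh_cache-<uuid> locks). A minimal sketch of both forms; the lock names mirror the log, but the function bodies are placeholders, not Nova's implementation.

    # Hedged sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock / acquired / released" records in this log.
    from oslo_concurrency import lockutils

    # Decorator form: serialise callers on a named, process-local lock,
    # the way the resource tracker guards "compute_resources".
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # resource accounting would happen here

    # Context-manager form: how per-instance caches are guarded,
    # e.g. lock "refresh_cache-<instance uuid>".
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance's network info cache here
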
[ 1201.320767] env[65758]: DEBUG nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1201.321605] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76031a6-5efc-48c2-9771-34ed2c64ea2f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.518100] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "d0cadaac-07dd-4478-a83e-80ba46d103b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.518347] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "d0cadaac-07dd-4478-a83e-80ba46d103b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.633434] env[65758]: DEBUG oslo_vmware.api [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661381, 'name': PowerOffVM_Task, 'duration_secs': 0.231603} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.633812] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.634073] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.634432] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e06f7fd-9bd4-4bd1-a50e-1d2d91a69b5c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.683821] env[65758]: DEBUG oslo_vmware.api [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661382, 'name': PowerOnVM_Task, 'duration_secs': 0.467916} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.684222] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1201.684446] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-9413c8d0-d5ca-4015-876c-ab2db210c0d8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance 'ba16e0fe-6748-4d14-bb28-a65d63a2274d' progress to 100 {{(pid=65758) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.708355] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.708744] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.709248] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Deleting the datastore file [datastore2] 0c64f9ad-33e1-4792-9b44-b088d77c0383 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.709797] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4365d0fa-b071-46e7-b944-eae74508c81d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.718458] env[65758]: DEBUG oslo_vmware.api [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for the task: (returnval){ [ 1201.718458] env[65758]: value = "task-4661384" [ 1201.718458] env[65758]: _type = "Task" [ 1201.718458] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.731726] env[65758]: DEBUG oslo_vmware.api [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.842963] env[65758]: INFO nova.compute.manager [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Took 14.80 seconds to build instance. 
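Annotation: every vCenter operation in this log follows the same shape: invoke a *_Task method through the API session (PowerOnVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, ...), get back a task reference such as "task-4661384", then poll it until it reports "completed successfully". A hedged sketch of that invoke-then-wait pattern with oslo.vmware; the hostname and credentials are placeholders and the call style is simplified relative to Nova's vmwareapi driver.

    # Hedged sketch of the invoke-then-wait pattern behind the
    # "Waiting for the task ... / progress is N% / completed successfully"
    # records. Hostname and credentials below are placeholders.
    from oslo_vmware import api as vmware_api

    # Constructing the session logs into vCenter, as at the top of this log.
    session = vmware_api.VMwareAPISession(
        host='vcenter.example.org', port=443,
        server_username='user', server_password='secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # Returns a task reference such as "task-4661376" in the log.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Polls the task until it succeeds (raising on error), producing
        # the "progress is N%" debug lines seen above.
        session.wait_for_task(task)
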
[ 1202.020692] env[65758]: DEBUG nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1202.231288] env[65758]: DEBUG oslo_vmware.api [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Task: {'id': task-4661384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225704} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.231288] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.231792] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.232162] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.232476] env[65758]: INFO nova.compute.manager [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1202.232923] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1202.236175] env[65758]: DEBUG nova.compute.manager [-] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1202.236312] env[65758]: DEBUG nova.network.neutron [-] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1202.236675] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
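Annotation: the "Waiting for function ... _deallocate_network_with_retries to return" record above comes from oslo.service's loopingcall module, which suggests the network deallocation is wrapped in its retry helper. A generic, hedged sketch of that decorator follows; the retry parameters and the wrapped function are illustrative, not necessarily the exact values Nova uses here.

    # Hedged sketch of the oslo.service retry wrapper suggested by the
    # "_deallocate_network_with_retries" loopingcall record above.
    from oslo_service import loopingcall


    class TransientNetworkError(Exception):
        """Placeholder for the exception types worth retrying."""


    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10,
                                exceptions=(TransientNetworkError,))
    def deallocate_network_with_retries():
        # On TransientNetworkError this is re-run with increasing sleeps;
        # any other exception, or exhausting the retries, propagates.
        pass
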
[ 1202.237350] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1202.237752] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1202.265187] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd8d6e8-6c76-4cda-8cd9-94db0a213bb1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.273987] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf948f1d-3821-46b0-80ab-92acc0df56fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.280725] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1202.309329] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61c00d3-93e0-4f65-97ab-beabc54b1dd8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.317974] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbca6b6-fb4b-46f5-b715-168fc830cac0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.332784] env[65758]: DEBUG nova.compute.provider_tree [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.345166] env[65758]: DEBUG oslo_concurrency.lockutils [None req-abb34fab-5f1e-4d58-84c8-529206483c96 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.315s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.498538] env[65758]: DEBUG nova.compute.manager [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Received event network-changed-8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1202.498737] env[65758]: DEBUG nova.compute.manager [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Refreshing instance network info cache due to event network-changed-8bba2462-60e3-4a60-9eac-f9e7a6e5a898. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1202.498947] env[65758]: DEBUG oslo_concurrency.lockutils [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] Acquiring lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.499369] env[65758]: DEBUG oslo_concurrency.lockutils [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] Acquired lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1202.499529] env[65758]: DEBUG nova.network.neutron [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Refreshing network info cache for port 8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1202.542397] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.580647] env[65758]: DEBUG nova.compute.manager [req-68fb3123-3c7b-4f57-b11b-20ed22aaed6e req-856e8f73-8743-477b-bcb0-c3f9701337b0 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Received event network-vif-deleted-c67cebc7-ffed-46f4-83f9-32b2a75e0b87 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1202.580831] env[65758]: INFO nova.compute.manager [req-68fb3123-3c7b-4f57-b11b-20ed22aaed6e req-856e8f73-8743-477b-bcb0-c3f9701337b0 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Neutron deleted interface c67cebc7-ffed-46f4-83f9-32b2a75e0b87; detaching it from the instance and deleting it from the info cache [ 1202.581010] env[65758]: DEBUG nova.network.neutron [req-68fb3123-3c7b-4f57-b11b-20ed22aaed6e req-856e8f73-8743-477b-bcb0-c3f9701337b0 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1202.835711] env[65758]: DEBUG nova.scheduler.client.report [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1203.002710] env[65758]: WARNING neutronclient.v2_0.client [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use 
that as this will be removed in a future release. [ 1203.003543] env[65758]: WARNING openstack [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1203.003908] env[65758]: WARNING openstack [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1203.033688] env[65758]: DEBUG nova.network.neutron [-] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1203.084037] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-085e9be6-f3bc-401e-8e7a-48e455b836ef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.096297] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b5a6b4-be62-4f7d-abab-a517cda19f4c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.128266] env[65758]: DEBUG nova.compute.manager [req-68fb3123-3c7b-4f57-b11b-20ed22aaed6e req-856e8f73-8743-477b-bcb0-c3f9701337b0 service nova] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Detach interface failed, port_id=c67cebc7-ffed-46f4-83f9-32b2a75e0b87, reason: Instance 0c64f9ad-33e1-4792-9b44-b088d77c0383 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1203.210696] env[65758]: WARNING neutronclient.v2_0.client [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1203.211401] env[65758]: WARNING openstack [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1203.211751] env[65758]: WARNING openstack [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1203.295712] env[65758]: DEBUG nova.network.neutron [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Updated VIF entry in instance network info cache for port 8bba2462-60e3-4a60-9eac-f9e7a6e5a898. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1203.296128] env[65758]: DEBUG nova.network.neutron [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Updating instance_info_cache with network_info: [{"id": "8bba2462-60e3-4a60-9eac-f9e7a6e5a898", "address": "fa:16:3e:fc:7a:8c", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bba2462-60", "ovs_interfaceid": "8bba2462-60e3-4a60-9eac-f9e7a6e5a898", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1203.341399] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.341900] env[65758]: DEBUG nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1203.345910] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.804s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.348206] env[65758]: INFO nova.compute.claims [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1203.537633] env[65758]: INFO nova.compute.manager [-] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Took 1.30 seconds to deallocate network for instance. 
[ 1203.557566] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1203.593459] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1203.593870] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1203.594149] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1203.630679] env[65758]: DEBUG nova.network.neutron [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Port b91df992-11ae-4d37-af24-380860864b45 binding to destination host cpu-1 is already ACTIVE {{(pid=65758) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3278}} [ 1203.630948] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.631112] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.631275] env[65758]: DEBUG nova.network.neutron [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1203.798899] env[65758]: DEBUG oslo_concurrency.lockutils [req-1bc12de4-5f0d-4ca9-9654-f734fe84b95b req-ae22d952-3e2d-4e0b-80e4-0d5ee4ceaada service nova] Releasing lock "refresh_cache-a9550f72-009c-4143-afe2-887727e5c071" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.854992] env[65758]: DEBUG nova.compute.utils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Using /dev/sd instead of None 
{{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1203.859050] env[65758]: DEBUG nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1203.859302] env[65758]: DEBUG nova.network.neutron [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1203.859621] env[65758]: WARNING neutronclient.v2_0.client [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1203.859957] env[65758]: WARNING neutronclient.v2_0.client [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1203.860581] env[65758]: WARNING openstack [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1203.860936] env[65758]: WARNING openstack [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1203.921588] env[65758]: DEBUG nova.policy [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fabcc2dbd0b49668d0c46f577f88152', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3dc40c4af744624b6c320390d0cb210', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1204.045118] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.048166] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7351bb0b-c0e5-43db-a3c0-d27a284f35b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.059508] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b77395-eaaa-4295-9af2-9866c3b16aaa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.102890] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35843dc7-bfdc-4880-9c6c-942773f66338 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.112208] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a224c50e-854b-44a9-8ce2-ee3efb9b036d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.128291] env[65758]: DEBUG nova.compute.provider_tree [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.134483] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1204.135297] env[65758]: WARNING openstack [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1204.135735] env[65758]: WARNING openstack [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1204.359729] env[65758]: DEBUG nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1204.366455] env[65758]: DEBUG nova.network.neutron [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Successfully created port: 3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1204.386470] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1204.387175] env[65758]: WARNING openstack [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1204.387816] env[65758]: WARNING openstack [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1204.474949] env[65758]: DEBUG nova.network.neutron [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [{"id": "b91df992-11ae-4d37-af24-380860864b45", "address": "fa:16:3e:13:ee:a2", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb91df992-11", "ovs_interfaceid": "b91df992-11ae-4d37-af24-380860864b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1204.631686] env[65758]: DEBUG nova.scheduler.client.report [None req-c49ffe2f-069f-4251-9349-53b1ba685065 
tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1204.977546] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.137344] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.791s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.137907] env[65758]: DEBUG nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1205.140619] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.096s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.140828] env[65758]: DEBUG nova.objects.instance [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lazy-loading 'resources' on Instance uuid 0c64f9ad-33e1-4792-9b44-b088d77c0383 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.373293] env[65758]: DEBUG nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1205.399286] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1205.399584] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1205.399756] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1205.399992] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1205.400148] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1205.400296] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1205.400502] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1205.400655] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1205.400816] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1205.400977] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1205.401164] env[65758]: DEBUG nova.virt.hardware [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1205.402099] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241a5155-2bf0-481d-a387-c8408a995d3c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.416771] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cef8c7d-e9a7-4156-a174-419dea47611d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.481490] env[65758]: DEBUG nova.compute.manager [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=65758) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:925}} [ 1205.644528] env[65758]: DEBUG nova.compute.utils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1205.649105] env[65758]: DEBUG nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1205.649344] env[65758]: DEBUG nova.network.neutron [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1205.649689] env[65758]: WARNING neutronclient.v2_0.client [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1205.649973] env[65758]: WARNING neutronclient.v2_0.client [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1205.650759] env[65758]: WARNING openstack [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1205.651154] env[65758]: WARNING openstack [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1205.696612] env[65758]: DEBUG nova.policy [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc85d2d1d84f4df0b4de5e6388bb9398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82bfbb5ee6714c9aa5119cb714d28ce2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1205.768692] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d44742-8c11-4775-9dfb-555d687b76d4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.789498] env[65758]: DEBUG nova.compute.manager [req-de5a11b4-368e-4448-81fa-998e39052952 req-2fb64776-ea7a-43ec-99b8-c12bb99288b1 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Received event network-vif-plugged-3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1205.789736] env[65758]: DEBUG oslo_concurrency.lockutils [req-de5a11b4-368e-4448-81fa-998e39052952 req-2fb64776-ea7a-43ec-99b8-c12bb99288b1 service nova] Acquiring lock "d1918f85-d122-4a84-88b3-f038e8c1149e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.790377] env[65758]: DEBUG oslo_concurrency.lockutils [req-de5a11b4-368e-4448-81fa-998e39052952 req-2fb64776-ea7a-43ec-99b8-c12bb99288b1 service nova] Lock "d1918f85-d122-4a84-88b3-f038e8c1149e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.790591] env[65758]: DEBUG oslo_concurrency.lockutils [req-de5a11b4-368e-4448-81fa-998e39052952 req-2fb64776-ea7a-43ec-99b8-c12bb99288b1 service 
nova] Lock "d1918f85-d122-4a84-88b3-f038e8c1149e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.790885] env[65758]: DEBUG nova.compute.manager [req-de5a11b4-368e-4448-81fa-998e39052952 req-2fb64776-ea7a-43ec-99b8-c12bb99288b1 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] No waiting events found dispatching network-vif-plugged-3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1205.790957] env[65758]: WARNING nova.compute.manager [req-de5a11b4-368e-4448-81fa-998e39052952 req-2fb64776-ea7a-43ec-99b8-c12bb99288b1 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Received unexpected event network-vif-plugged-3d310661-ff77-40cb-b141-66ffbfd71a3f for instance with vm_state building and task_state spawning. [ 1205.792787] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f7ff34-1127-430f-892b-ba915adb167b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.825670] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d1976a-db0b-43cc-971a-77ace144543b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.834700] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5166f03e-44df-4e12-a779-50766edcc868 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.851707] env[65758]: DEBUG nova.compute.provider_tree [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.863301] env[65758]: DEBUG nova.network.neutron [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Successfully updated port: 3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1205.912038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.912038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.912278] env[65758]: INFO nova.compute.manager [None 
req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Shelving [ 1206.024488] env[65758]: DEBUG nova.network.neutron [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Successfully created port: b35c5d07-8b04-492b-961d-93f05229c072 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1206.149818] env[65758]: DEBUG nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1206.355213] env[65758]: DEBUG nova.scheduler.client.report [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1206.365710] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.365924] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquired lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.366116] env[65758]: DEBUG nova.network.neutron [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1206.585264] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.860615] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.720s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.863231] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.278s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.869127] env[65758]: WARNING openstack [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1206.869619] env[65758]: WARNING openstack [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1206.889702] env[65758]: INFO nova.scheduler.client.report [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Deleted allocations for instance 0c64f9ad-33e1-4792-9b44-b088d77c0383 [ 1206.909872] env[65758]: DEBUG nova.network.neutron [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1206.922566] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.923542] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c94fc3d2-8fc6-4000-8315-1d5f17a766f6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.932408] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1206.932408] env[65758]: value = "task-4661385" [ 1206.932408] env[65758]: _type = "Task" [ 1206.932408] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.946715] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661385, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.991473] env[65758]: WARNING neutronclient.v2_0.client [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1206.992219] env[65758]: WARNING openstack [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1206.992588] env[65758]: WARNING openstack [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1207.127763] env[65758]: DEBUG nova.network.neutron [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [{"id": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "address": "fa:16:3e:5a:2e:66", "network": {"id": "4cc217d8-93be-4512-ae15-e75bcfb83095", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-70005608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f3dc40c4af744624b6c320390d0cb210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d310661-ff", "ovs_interfaceid": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1207.167336] env[65758]: DEBUG nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 
d0cadaac-07dd-4478-a83e-80ba46d103b2] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1207.196496] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1207.196727] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1207.196874] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1207.197065] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1207.197207] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1207.197429] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1207.197631] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1207.197782] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1207.197940] env[65758]: DEBUG nova.virt.hardware [None 
req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1207.198108] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1207.198276] env[65758]: DEBUG nova.virt.hardware [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1207.199163] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e227c5a-7d02-41c6-b537-4e51b6e6e085 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.208070] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67bc1fb-41b0-4dc5-8523-6835e7735694 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.366352] env[65758]: DEBUG nova.objects.instance [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'migration_context' on Instance uuid ba16e0fe-6748-4d14-bb28-a65d63a2274d {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1207.398727] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c78b5fce-47f9-4687-bb39-f9157bb88faa tempest-ServerGroupTestJSON-1408111198 tempest-ServerGroupTestJSON-1408111198-project-member] Lock "0c64f9ad-33e1-4792-9b44-b088d77c0383" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.804s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.446136] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661385, 'name': PowerOffVM_Task, 'duration_secs': 0.199503} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.446494] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1207.447416] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52616b20-2c88-422e-a2e6-4f9c96919edc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.468552] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d195161b-394e-4927-b6a5-6d2d53b4280f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.519739] env[65758]: DEBUG nova.network.neutron [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Successfully updated port: b35c5d07-8b04-492b-961d-93f05229c072 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1207.630578] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Releasing lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.630949] env[65758]: DEBUG nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Instance network_info: |[{"id": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "address": "fa:16:3e:5a:2e:66", "network": {"id": "4cc217d8-93be-4512-ae15-e75bcfb83095", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-70005608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f3dc40c4af744624b6c320390d0cb210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d310661-ff", "ovs_interfaceid": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1207.631404] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: 
d1918f85-d122-4a84-88b3-f038e8c1149e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:2e:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24727047-6358-4015-86c1-394ab07fb88f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d310661-ff77-40cb-b141-66ffbfd71a3f', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1207.639448] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Creating folder: Project (f3dc40c4af744624b6c320390d0cb210). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1207.639740] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a67f5a71-530c-44d2-b06b-fc02269e07b9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.650894] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Created folder: Project (f3dc40c4af744624b6c320390d0cb210) in parent group-v909763. [ 1207.651103] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Creating folder: Instances. Parent ref: group-v910075. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1207.651388] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eef754ea-078f-4629-99a8-5f569097ef46 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.662175] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Created folder: Instances in parent group-v910075. [ 1207.662459] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1207.662670] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1207.662928] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1532b693-3aad-4f0c-9283-3af00f702381 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.684093] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1207.684093] env[65758]: value = "task-4661388" [ 1207.684093] env[65758]: _type = "Task" [ 1207.684093] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.692557] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661388, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.828368] env[65758]: DEBUG nova.compute.manager [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Received event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1207.828700] env[65758]: DEBUG nova.compute.manager [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing instance network info cache due to event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1207.829118] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Acquiring lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.829387] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Acquired lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.829694] env[65758]: DEBUG nova.network.neutron [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1207.982224] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1207.982858] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7f6165ae-ea7d-4acd-92e8-172785800c39 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.991602] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1207.991602] env[65758]: value = "task-4661389" [ 1207.991602] env[65758]: _type = "Task" [ 1207.991602] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.005320] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661389, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.011440] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d6c42b-5ffe-4210-95d0-e985436b510d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.021240] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9391c4-0e71-4319-8ca9-e31150aeffaa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.026360] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "refresh_cache-d0cadaac-07dd-4478-a83e-80ba46d103b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.026569] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "refresh_cache-d0cadaac-07dd-4478-a83e-80ba46d103b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.026714] env[65758]: DEBUG nova.network.neutron [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1208.063791] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712cb5d6-1230-449e-8419-97080c1c83fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.074317] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4900f1e-8ea4-417f-a107-f9baa1d3f12f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.090503] env[65758]: DEBUG nova.compute.provider_tree [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.199510] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661388, 'name': CreateVM_Task, 'duration_secs': 0.396945} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.199790] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1208.200594] env[65758]: WARNING neutronclient.v2_0.client [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1208.201159] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.201437] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.201937] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1208.202341] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a342f367-4548-4e4e-9e0b-3e002336bd2b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.209498] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1208.209498] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5265dec9-d5ab-3d0e-b169-c3347c5fab3f" [ 1208.209498] env[65758]: _type = "Task" [ 1208.209498] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.220390] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5265dec9-d5ab-3d0e-b169-c3347c5fab3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.333045] env[65758]: WARNING neutronclient.v2_0.client [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1208.333673] env[65758]: WARNING openstack [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1208.334097] env[65758]: WARNING openstack [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1208.480604] env[65758]: WARNING neutronclient.v2_0.client [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1208.481299] env[65758]: WARNING openstack [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1208.481659] env[65758]: WARNING openstack [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1208.502687] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661389, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.530916] env[65758]: WARNING openstack [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1208.531352] env[65758]: WARNING openstack [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1208.593779] env[65758]: DEBUG nova.scheduler.client.report [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1208.627643] env[65758]: DEBUG nova.network.neutron [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1208.662773] env[65758]: DEBUG nova.network.neutron [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updated VIF entry in instance network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1208.663303] env[65758]: DEBUG nova.network.neutron [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [{"id": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "address": "fa:16:3e:5a:2e:66", "network": {"id": "4cc217d8-93be-4512-ae15-e75bcfb83095", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-70005608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f3dc40c4af744624b6c320390d0cb210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d310661-ff", "ovs_interfaceid": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1208.720765] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5265dec9-d5ab-3d0e-b169-c3347c5fab3f, 'name': SearchDatastore_Task, 'duration_secs': 0.012204} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.721089] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.721334] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1208.721570] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.721714] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.721888] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1208.722194] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec461c3f-abe8-4e93-879d-f8826cca9fea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.731545] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1208.731747] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1208.733536] env[65758]: WARNING neutronclient.v2_0.client [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1208.734186] env[65758]: WARNING openstack [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1208.734538] env[65758]: WARNING openstack [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1208.742376] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85925c89-7bbf-40ff-b4a7-60a11b9d04b7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.751226] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1208.751226] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5265224f-5bcd-baf0-d3b0-4a43f1ed5d0a" [ 1208.751226] env[65758]: _type = "Task" [ 1208.751226] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.760465] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5265224f-5bcd-baf0-d3b0-4a43f1ed5d0a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.822364] env[65758]: DEBUG nova.network.neutron [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Updating instance_info_cache with network_info: [{"id": "b35c5d07-8b04-492b-961d-93f05229c072", "address": "fa:16:3e:b5:21:b4", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35c5d07-8b", "ovs_interfaceid": "b35c5d07-8b04-492b-961d-93f05229c072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1209.005771] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661389, 'name': CreateSnapshot_Task, 'duration_secs': 0.629993} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.006311] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1209.008176] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24bf85c-90f3-4bfb-8d2b-fe9c772a2bf1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.166778] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Releasing lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.167060] env[65758]: DEBUG nova.compute.manager [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Received event network-vif-plugged-b35c5d07-8b04-492b-961d-93f05229c072 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1209.167375] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Acquiring lock "d0cadaac-07dd-4478-a83e-80ba46d103b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.167675] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Lock "d0cadaac-07dd-4478-a83e-80ba46d103b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.167921] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Lock "d0cadaac-07dd-4478-a83e-80ba46d103b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.168292] env[65758]: DEBUG nova.compute.manager [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] No waiting events found dispatching network-vif-plugged-b35c5d07-8b04-492b-961d-93f05229c072 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1209.168516] env[65758]: WARNING nova.compute.manager [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Received unexpected event network-vif-plugged-b35c5d07-8b04-492b-961d-93f05229c072 for instance with vm_state building and task_state spawning. 
[ 1209.168785] env[65758]: DEBUG nova.compute.manager [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Received event network-changed-b35c5d07-8b04-492b-961d-93f05229c072 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1209.169145] env[65758]: DEBUG nova.compute.manager [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Refreshing instance network info cache due to event network-changed-b35c5d07-8b04-492b-961d-93f05229c072. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1209.169357] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Acquiring lock "refresh_cache-d0cadaac-07dd-4478-a83e-80ba46d103b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.264061] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5265224f-5bcd-baf0-d3b0-4a43f1ed5d0a, 'name': SearchDatastore_Task, 'duration_secs': 0.009605} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.265062] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e249ca47-c746-4553-834f-1b373144259f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.272156] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1209.272156] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e36429-84f8-5bcb-515e-8532660f5b4a" [ 1209.272156] env[65758]: _type = "Task" [ 1209.272156] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.280788] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e36429-84f8-5bcb-515e-8532660f5b4a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.325954] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "refresh_cache-d0cadaac-07dd-4478-a83e-80ba46d103b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.327698] env[65758]: DEBUG nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Instance network_info: |[{"id": "b35c5d07-8b04-492b-961d-93f05229c072", "address": "fa:16:3e:b5:21:b4", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35c5d07-8b", "ovs_interfaceid": "b35c5d07-8b04-492b-961d-93f05229c072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1209.328138] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Acquired lock "refresh_cache-d0cadaac-07dd-4478-a83e-80ba46d103b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.328330] env[65758]: DEBUG nova.network.neutron [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Refreshing network info cache for port b35c5d07-8b04-492b-961d-93f05229c072 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1209.329694] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:21:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b35c5d07-8b04-492b-961d-93f05229c072', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.339628] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1209.341460] env[65758]: WARNING neutronclient.v2_0.client [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1209.342191] env[65758]: WARNING openstack [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1209.345159] env[65758]: WARNING openstack [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1209.351799] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1209.352691] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4192512a-71ac-4ecb-8a50-4d4beda63cc1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.376080] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.376080] env[65758]: value = "task-4661390" [ 1209.376080] env[65758]: _type = "Task" [ 1209.376080] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.386641] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661390, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.531077] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1209.535093] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cb55e5ed-855a-4285-b246-bbd254955feb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.546724] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1209.546724] env[65758]: value = "task-4661391" [ 1209.546724] env[65758]: _type = "Task" [ 1209.546724] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.557697] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661391, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.606673] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.743s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.622583] env[65758]: WARNING neutronclient.v2_0.client [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1209.622583] env[65758]: WARNING openstack [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1209.622583] env[65758]: WARNING openstack [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1209.749688] env[65758]: DEBUG nova.network.neutron [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Updated VIF entry in instance network info cache for port b35c5d07-8b04-492b-961d-93f05229c072. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1209.750172] env[65758]: DEBUG nova.network.neutron [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Updating instance_info_cache with network_info: [{"id": "b35c5d07-8b04-492b-961d-93f05229c072", "address": "fa:16:3e:b5:21:b4", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35c5d07-8b", "ovs_interfaceid": "b35c5d07-8b04-492b-961d-93f05229c072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1209.783721] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e36429-84f8-5bcb-515e-8532660f5b4a, 'name': SearchDatastore_Task, 'duration_secs': 0.011798} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.784015] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.784307] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e/d1918f85-d122-4a84-88b3-f038e8c1149e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1209.784614] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6848e83-9ef3-411f-8df7-4b0362cdb6ef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.793890] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1209.793890] env[65758]: value = "task-4661392" [ 1209.793890] env[65758]: _type = "Task" [ 1209.793890] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.804692] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661392, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.888899] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661390, 'name': CreateVM_Task, 'duration_secs': 0.375661} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.889122] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1209.889689] env[65758]: WARNING neutronclient.v2_0.client [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1209.890182] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.890340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.890694] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1209.891655] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7bc0a8b-577a-4a3d-96b4-4c704536c30c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.897846] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1209.897846] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52df5323-edd4-9585-c1d2-aeb2d688de06" [ 1209.897846] env[65758]: _type = "Task" [ 1209.897846] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.908057] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52df5323-edd4-9585-c1d2-aeb2d688de06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.059713] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661391, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.253662] env[65758]: DEBUG oslo_concurrency.lockutils [req-be2f3bdc-baf0-4f2a-8576-7db37f0243db req-62f271ee-0c8c-4766-b58d-c25d2bb1ec2f service nova] Releasing lock "refresh_cache-d0cadaac-07dd-4478-a83e-80ba46d103b2" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.305391] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661392, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.409342] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52df5323-edd4-9585-c1d2-aeb2d688de06, 'name': SearchDatastore_Task, 'duration_secs': 0.011615} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.409766] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.410085] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1210.410343] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.410506] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.410687] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1210.410989] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c4b16d2-d858-4e12-8a83-5c30c3a7d0f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.420349] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1210.420553] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1210.421346] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfab556f-ce6d-4532-9bd6-c4cdf28f6d62 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.428299] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1210.428299] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52203c68-f159-6b91-ddca-9779c140a922" [ 1210.428299] env[65758]: _type = "Task" [ 1210.428299] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.437113] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52203c68-f159-6b91-ddca-9779c140a922, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.560559] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661391, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.805184] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661392, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610144} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.805560] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e/d1918f85-d122-4a84-88b3-f038e8c1149e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1210.805662] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1210.805939] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1baf9ba-db47-4ee1-bacc-2333f4f91d09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.812822] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1210.812822] env[65758]: value = "task-4661393" [ 1210.812822] env[65758]: _type = "Task" [ 1210.812822] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.821910] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.940614] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52203c68-f159-6b91-ddca-9779c140a922, 'name': SearchDatastore_Task, 'duration_secs': 0.012995} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.941606] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ced8ba69-ef2e-410d-ba96-afc1f624ff58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.948575] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1210.948575] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c4ca43-39a7-4d40-231a-66443268f196" [ 1210.948575] env[65758]: _type = "Task" [ 1210.948575] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.959560] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c4ca43-39a7-4d40-231a-66443268f196, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.059652] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661391, 'name': CloneVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.170124] env[65758]: INFO nova.compute.manager [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Swapping old allocation on dict_keys(['0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51']) held by migration 75fd58b3-37ba-47e0-bee0-bec414fd08b6 for instance [ 1211.194318] env[65758]: DEBUG nova.scheduler.client.report [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Overwriting current allocation {'allocations': {'0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 161}}, 'project_id': '4095654557a34bb0907071aedb3bb678', 'user_id': '91358f51732f44198a020f6669168408', 'consumer_generation': 1} on consumer ba16e0fe-6748-4d14-bb28-a65d63a2274d {{(pid=65758) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1211.248048] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
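The recurring "Invoking ..._Task", "Waiting for the task", and "_poll_task ... progress is N%" entries above are the oslo.vmware task loop: the driver kicks off a vCenter task and blocks until it finishes. The sketch below is illustrative only, assuming oslo.vmware's VMwareAPISession with placeholder host/credentials and using the PowerOnVM_Task / vm-910080 identifiers that appear in this log; it is not the driver's own code.

    # Illustrative sketch: drive a vCenter task and wait for it, the pattern
    # behind the wait_for_task / _poll_task lines above. Host and credentials
    # are placeholders; vm-910080 is the VM moref value seen later in this log.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc1.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)  # poll cadence behind _poll_task

    vm_ref = vim_util.get_moref('vm-910080', 'VirtualMachine')

    # invoke_api issues the SOAP call (here PowerOnVM_Task); wait_for_task then
    # polls the returned task object and raises if it completes with an error.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task_ref)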
[ 1211.295091] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.295306] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.295483] env[65758]: DEBUG nova.network.neutron [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1211.331659] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.304985} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.331999] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1211.333455] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97bca5a-eeee-400c-aa2e-4fc59d412c69 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.358147] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e/d1918f85-d122-4a84-88b3-f038e8c1149e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1211.358523] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eef0ed2e-27dd-44a3-939b-f46b1ed90459 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.382071] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1211.382071] env[65758]: value = "task-4661394" [ 1211.382071] env[65758]: _type = "Task" [ 1211.382071] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.391362] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661394, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.459325] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c4ca43-39a7-4d40-231a-66443268f196, 'name': SearchDatastore_Task, 'duration_secs': 0.012475} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.460542] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.461021] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] d0cadaac-07dd-4478-a83e-80ba46d103b2/d0cadaac-07dd-4478-a83e-80ba46d103b2.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1211.461376] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c03e76c-27f9-4dad-93c0-72956c0df7da {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.469057] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1211.469057] env[65758]: value = "task-4661395" [ 1211.469057] env[65758]: _type = "Task" [ 1211.469057] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.478452] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661395, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.559888] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661391, 'name': CloneVM_Task, 'duration_secs': 1.521345} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.560089] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Created linked-clone VM from snapshot [ 1211.560858] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92a793d-692a-40db-adf0-6b75eeb5f0d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.569985] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Uploading image 81bd8cd1-7783-4705-af00-19222b217ece {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1211.598446] env[65758]: DEBUG oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1211.598446] env[65758]: value = "vm-910080" [ 1211.598446] env[65758]: _type = "VirtualMachine" [ 1211.598446] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1211.598811] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2ca83e23-469d-4342-bd4c-6705fd29d8de {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.607124] env[65758]: DEBUG oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lease: (returnval){ [ 1211.607124] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525c6c5c-cd2c-556d-5ea1-3d456647b952" [ 1211.607124] env[65758]: _type = "HttpNfcLease" [ 1211.607124] env[65758]: } obtained for exporting VM: (result){ [ 1211.607124] env[65758]: value = "vm-910080" [ 1211.607124] env[65758]: _type = "VirtualMachine" [ 1211.607124] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1211.607440] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the lease: (returnval){ [ 1211.607440] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525c6c5c-cd2c-556d-5ea1-3d456647b952" [ 1211.607440] env[65758]: _type = "HttpNfcLease" [ 1211.607440] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1211.615873] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1211.615873] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525c6c5c-cd2c-556d-5ea1-3d456647b952" [ 1211.615873] env[65758]: _type = "HttpNfcLease" [ 1211.615873] env[65758]: } is initializing. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1211.799024] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1211.799836] env[65758]: WARNING openstack [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1211.800221] env[65758]: WARNING openstack [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1211.899802] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661394, 'name': ReconfigVM_Task, 'duration_secs': 0.30061} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.900219] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Reconfigured VM instance instance-00000071 to attach disk [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e/d1918f85-d122-4a84-88b3-f038e8c1149e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1211.900992] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-641ec5db-be3e-48ca-86e9-db8fe42d0da8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.911625] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1211.911625] env[65758]: value = "task-4661397" [ 1211.911625] env[65758]: _type = "Task" [ 1211.911625] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.923377] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661397, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.981256] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497322} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.981560] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] d0cadaac-07dd-4478-a83e-80ba46d103b2/d0cadaac-07dd-4478-a83e-80ba46d103b2.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1211.981787] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1211.982096] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-10dbdc96-3c50-4112-927a-b79bd06b873f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.990255] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1211.990255] env[65758]: value = "task-4661398" [ 1211.990255] env[65758]: _type = "Task" [ 1211.990255] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.999181] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661398, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.060183] env[65758]: WARNING neutronclient.v2_0.client [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
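The repeated neutronclient WARNING above points callers at the OpenStack SDK as the replacement for the deprecated Python bindings. A minimal sketch of the equivalent port lookup through openstacksdk follows; the clouds.yaml entry name is a placeholder, and the device_id is the instance UUID from the surrounding entries.

    # Illustrative sketch of what the deprecation warning asks callers to move
    # to: openstacksdk instead of neutronclient. 'devstack' is a placeholder
    # clouds.yaml entry; the device_id is instance ba16e0fe from this log.
    import openstack

    conn = openstack.connect(cloud='devstack')

    # Roughly the neutronclient list_ports() equivalent: fetch the ports bound
    # to one instance and print their fixed IPs.
    for port in conn.network.ports(device_id='ba16e0fe-6748-4d14-bb28-a65d63a2274d'):
        print(port.id, [ip['ip_address'] for ip in port.fixed_ips])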
[ 1212.060923] env[65758]: WARNING openstack [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1212.061339] env[65758]: WARNING openstack [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1212.118432] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1212.118432] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525c6c5c-cd2c-556d-5ea1-3d456647b952" [ 1212.118432] env[65758]: _type = "HttpNfcLease" [ 1212.118432] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1212.118432] env[65758]: DEBUG oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1212.118432] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525c6c5c-cd2c-556d-5ea1-3d456647b952" [ 1212.118432] env[65758]: _type = "HttpNfcLease" [ 1212.118432] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1212.119227] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fde234-6a7d-4415-acfb-e44c996ba857 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.129401] env[65758]: DEBUG oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa4479-0e17-bb8b-c995-bb95a4e78b44/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1212.129651] env[65758]: DEBUG oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa4479-0e17-bb8b-c995-bb95a4e78b44/disk-0.vmdk for reading. 
{{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1212.189295] env[65758]: DEBUG nova.network.neutron [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [{"id": "b91df992-11ae-4d37-af24-380860864b45", "address": "fa:16:3e:13:ee:a2", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb91df992-11", "ovs_interfaceid": "b91df992-11ae-4d37-af24-380860864b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1212.255900] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d48e20e8-70c3-4b2d-89b3-c51a4b9ec979 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.423075] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661397, 'name': Rename_Task, 'duration_secs': 0.174634} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.424243] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1212.424243] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6957b54-964a-4704-a978-2a4a4b808c84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.432058] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1212.432058] env[65758]: value = "task-4661399" [ 1212.432058] env[65758]: _type = "Task" [ 1212.432058] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.442654] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.503319] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089317} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.504654] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1212.504654] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d672a6a-8001-47c3-8d09-2112164c34f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.531048] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] d0cadaac-07dd-4478-a83e-80ba46d103b2/d0cadaac-07dd-4478-a83e-80ba46d103b2.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1212.531563] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06a43eb8-0dd6-4fcc-b49f-072be53635e8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.553217] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1212.553217] env[65758]: value = "task-4661400" [ 1212.553217] env[65758]: _type = "Task" [ 1212.553217] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.563959] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661400, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.693695] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-ba16e0fe-6748-4d14-bb28-a65d63a2274d" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.694830] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c761508-1521-446c-892b-2f7a9b94bfa7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.703235] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2730196-9446-4f14-af94-e048adf02fd6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.947066] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661399, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.063968] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661400, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.311582] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.311866] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.454136] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661399, 'name': PowerOnVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.570405] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661400, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.814742] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.815132] env[65758]: DEBUG nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1213.817786] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43bd7bde-3082-409c-b5bb-46c7e271155b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.829201] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1213.829201] env[65758]: value = "task-4661401" [ 1213.829201] env[65758]: _type = "Task" [ 1213.829201] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.843140] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661401, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.947163] env[65758]: DEBUG oslo_vmware.api [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661399, 'name': PowerOnVM_Task, 'duration_secs': 1.143139} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.947675] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1213.947893] env[65758]: INFO nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Took 8.57 seconds to spawn the instance on the hypervisor. 
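The lock entries in this stretch ("Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held 14.912s") come from oslo.concurrency's lockutils helpers, which emit exactly those DEBUG lines around the guarded section. A minimal sketch of the same pattern, with a placeholder function body and lock names taken from the log:

    # Illustrative sketch of the locking pattern behind the "Acquiring lock" /
    # "acquired" / "released ... held N.NNNs" DEBUG lines above. The guarded
    # function body is a placeholder.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs only while the in-process 'compute_resources' lock is held,
        # mirroring ResourceTracker.instance_claim in the entries above.
        return instance_uuid

    # The same mechanism as a context manager, as used for the
    # refresh_cache-<uuid> locks around network info cache rebuilds:
    with lockutils.lock('refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e'):
        pass  # build/refresh the instance's network info cache here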
[ 1213.948105] env[65758]: DEBUG nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1213.948980] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe13ce4-0928-4e98-830b-b79e15933682 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.068034] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661400, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.342539] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661401, 'name': PowerOffVM_Task, 'duration_secs': 0.341179} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.344873] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.345555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.347366] env[65758]: INFO nova.compute.claims [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1214.350461] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.351139] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1214.351457] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1214.351826] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1214.352153] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1214.352385] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1214.352643] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1214.352883] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1214.353155] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1214.353357] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1214.353567] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1214.353851] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 
tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1214.359605] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-509504dd-f80e-4159-950e-9a1e800afede {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.377181] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1214.377181] env[65758]: value = "task-4661402" [ 1214.377181] env[65758]: _type = "Task" [ 1214.377181] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.386931] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.470672] env[65758]: INFO nova.compute.manager [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Took 13.40 seconds to build instance. [ 1214.567406] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661400, 'name': ReconfigVM_Task, 'duration_secs': 1.515165} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.567855] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Reconfigured VM instance instance-00000072 to attach disk [datastore2] d0cadaac-07dd-4478-a83e-80ba46d103b2/d0cadaac-07dd-4478-a83e-80ba46d103b2.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.568764] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e990f2f-5ae4-4029-b6c6-8502177e1bb6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.577601] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1214.577601] env[65758]: value = "task-4661403" [ 1214.577601] env[65758]: _type = "Task" [ 1214.577601] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.589060] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661403, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.889069] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661402, 'name': ReconfigVM_Task, 'duration_secs': 0.199325} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.889818] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9966311-3b16-4fd9-9dfb-f631873b11f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.915659] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1214.915904] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1214.916091] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1214.916309] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1214.916428] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1214.916571] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1214.916784] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1214.917707] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1214.917707] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1214.917707] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1214.917707] env[65758]: DEBUG nova.virt.hardware [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1214.920045] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6669c4c3-7f78-4bac-aa6c-3842101b5af8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.927355] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1214.927355] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52011934-0131-fec0-50a8-0bdb3202dbfe" [ 1214.927355] env[65758]: _type = "Task" [ 1214.927355] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.937389] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52011934-0131-fec0-50a8-0bdb3202dbfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.973599] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e6630f09-a97f-4b7e-a653-9e54ff9be93e tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "d1918f85-d122-4a84-88b3-f038e8c1149e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.912s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.089217] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661403, 'name': Rename_Task, 'duration_secs': 0.15598} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.089461] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1215.089717] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e97c25d-3969-4c68-97f9-74dc63ad057b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.097571] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1215.097571] env[65758]: value = "task-4661404" [ 1215.097571] env[65758]: _type = "Task" [ 1215.097571] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.106912] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661404, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.438871] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52011934-0131-fec0-50a8-0bdb3202dbfe, 'name': SearchDatastore_Task, 'duration_secs': 0.032335} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.447305] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1215.447890] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-328d5c54-42a0-4a90-9d25-a27f07c19c91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.470622] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1215.470622] env[65758]: value = "task-4661405" [ 1215.470622] env[65758]: _type = "Task" [ 1215.470622] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.481066] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661405, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.506037] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679620ae-9726-4d78-9ee9-6d1d901e77e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.515079] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4389d4ab-8665-4a95-bd07-5fb093618e34 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.557083] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee29faa-4c0a-4e6f-9c60-83e2939ea8a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.567071] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c2bb63-b045-4591-9aa4-221ea19fc7d6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.584594] env[65758]: DEBUG nova.compute.provider_tree [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.608490] env[65758]: DEBUG oslo_vmware.api [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661404, 'name': PowerOnVM_Task, 'duration_secs': 0.482518} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.608720] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1215.608936] env[65758]: INFO nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Took 8.44 seconds to spawn the instance on the hypervisor. [ 1215.609128] env[65758]: DEBUG nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1215.609921] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ed309a-7abe-4311-aa52-f84a07340a80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.782952] env[65758]: INFO nova.compute.manager [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Rescuing [ 1215.783393] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.783577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquired lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.783752] env[65758]: DEBUG nova.network.neutron [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1215.987098] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661405, 'name': ReconfigVM_Task, 'duration_secs': 0.303337} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.987098] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1215.987098] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5726b6e2-7ca2-442c-99f1-3d3846da2b97 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.015029] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d/ba16e0fe-6748-4d14-bb28-a65d63a2274d.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1216.015622] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb481521-8f0f-482e-b3ca-6173840177d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.036686] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1216.036686] env[65758]: value = "task-4661406" [ 1216.036686] env[65758]: _type = "Task" [ 1216.036686] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.045982] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661406, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.088812] env[65758]: DEBUG nova.scheduler.client.report [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1216.127465] env[65758]: INFO nova.compute.manager [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Took 13.60 seconds to build instance. 
[ 1216.287396] env[65758]: WARNING neutronclient.v2_0.client [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1216.288306] env[65758]: WARNING openstack [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1216.288764] env[65758]: WARNING openstack [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1216.455122] env[65758]: WARNING neutronclient.v2_0.client [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1216.455697] env[65758]: WARNING openstack [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1216.456214] env[65758]: WARNING openstack [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1216.548173] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661406, 'name': ReconfigVM_Task, 'duration_secs': 0.473072} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.548536] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d/ba16e0fe-6748-4d14-bb28-a65d63a2274d.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1216.549624] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd8dd93-3722-4ae7-988a-9328a778eb5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.577186] env[65758]: DEBUG nova.network.neutron [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [{"id": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "address": "fa:16:3e:5a:2e:66", "network": {"id": "4cc217d8-93be-4512-ae15-e75bcfb83095", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-70005608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f3dc40c4af744624b6c320390d0cb210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d310661-ff", "ovs_interfaceid": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1216.579085] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092abc91-b286-41d4-af01-7f53f4e9a1a8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.603314] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.603863] env[65758]: DEBUG nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1216.607731] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb6481e-11b4-4fb1-acfd-36872213dc4d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.630608] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c49ffe2f-069f-4251-9349-53b1ba685065 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "d0cadaac-07dd-4478-a83e-80ba46d103b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.112s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.631660] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ef4793-1f80-4c65-ac2d-f55cf240bcc3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.640648] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1216.640928] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2f1dcb4-5213-4603-9464-6a4c5d02e5d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.649136] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1216.649136] env[65758]: value = "task-4661407" [ 1216.649136] env[65758]: _type = "Task" [ 1216.649136] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.658075] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661407, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.083564] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Releasing lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.116087] env[65758]: DEBUG nova.compute.utils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1217.117719] env[65758]: DEBUG nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1217.117925] env[65758]: DEBUG nova.network.neutron [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1217.118294] env[65758]: WARNING neutronclient.v2_0.client [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1217.118596] env[65758]: WARNING neutronclient.v2_0.client [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1217.119279] env[65758]: WARNING openstack [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1217.119706] env[65758]: WARNING openstack [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1217.161938] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661407, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.192608] env[65758]: DEBUG nova.policy [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec1f0d5eb8304e50b64a102ee8b01a8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a8729d781b1450e9b366785f96f9938', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1217.280015] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "d0cadaac-07dd-4478-a83e-80ba46d103b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.280366] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "d0cadaac-07dd-4478-a83e-80ba46d103b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.280608] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "d0cadaac-07dd-4478-a83e-80ba46d103b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.280807] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "d0cadaac-07dd-4478-a83e-80ba46d103b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.281029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "d0cadaac-07dd-4478-a83e-80ba46d103b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.283337] env[65758]: INFO nova.compute.manager [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Terminating instance [ 1217.477724] env[65758]: DEBUG nova.network.neutron [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 
0f3ae822-4c4c-4dff-94d4-3416187d6d25] Successfully created port: b15c4724-d64a-4321-8c27-5e337f8b9312 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1217.628850] env[65758]: DEBUG nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1217.661872] env[65758]: DEBUG oslo_vmware.api [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661407, 'name': PowerOnVM_Task, 'duration_secs': 0.545889} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.662301] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1217.787881] env[65758]: DEBUG nova.compute.manager [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1217.788165] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1217.789127] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5977f9b3-64cc-4cc8-ae5c-14890385bf03 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.797229] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1217.797497] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8085113-3329-4db5-99ac-cdc3254c8a36 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.804690] env[65758]: DEBUG oslo_vmware.api [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1217.804690] env[65758]: value = "task-4661408" [ 1217.804690] env[65758]: _type = "Task" [ 1217.804690] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.814212] env[65758]: DEBUG oslo_vmware.api [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.317271] env[65758]: DEBUG oslo_vmware.api [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661408, 'name': PowerOffVM_Task, 'duration_secs': 0.227139} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.317793] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1218.317793] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1218.317969] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ba69df7-2374-4bc1-aa15-e8e0f8f73c6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.392437] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1218.392795] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1218.393138] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleting the datastore file [datastore2] d0cadaac-07dd-4478-a83e-80ba46d103b2 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1218.393570] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-953b0c6c-4f9f-4954-812d-21e806b05ddb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.403421] env[65758]: DEBUG oslo_vmware.api [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1218.403421] env[65758]: value = "task-4661410" [ 1218.403421] env[65758]: _type = "Task" [ 1218.403421] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.415380] env[65758]: DEBUG oslo_vmware.api [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.625780] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1218.626172] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9f3cfea-bbf7-4ab4-9bc8-7832db0742a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.634313] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1218.634313] env[65758]: value = "task-4661411" [ 1218.634313] env[65758]: _type = "Task" [ 1218.634313] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.641145] env[65758]: DEBUG nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1218.647771] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661411, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.674589] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1218.674925] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1218.675150] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1218.675365] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1218.675520] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1218.675672] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1218.675881] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1218.676103] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1218.676310] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 
tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1218.676655] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1218.676864] env[65758]: DEBUG nova.virt.hardware [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1218.681266] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa48ed71-3c03-4abb-b239-20921d6febbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.691349] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5b730f-5103-4df1-8ce6-0a823b107991 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.722945] env[65758]: INFO nova.compute.manager [None req-a7b2e9c5-649c-4cf7-8983-64c5b29cace8 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance to original state: 'active' [ 1218.916203] env[65758]: DEBUG oslo_vmware.api [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251621} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.916499] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1218.916701] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1218.916904] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1218.917098] env[65758]: INFO nova.compute.manager [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1218.917388] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1218.917619] env[65758]: DEBUG nova.compute.manager [-] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1218.917718] env[65758]: DEBUG nova.network.neutron [-] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1218.917994] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1218.918578] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1218.918864] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1218.958976] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1218.964047] env[65758]: DEBUG nova.compute.manager [req-64f439eb-f2f2-4128-a74c-eb70221f834d req-7f15f9ba-bfbc-4911-9b45-f8db71a77c03 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Received event network-vif-plugged-b15c4724-d64a-4321-8c27-5e337f8b9312 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1218.964373] env[65758]: DEBUG oslo_concurrency.lockutils [req-64f439eb-f2f2-4128-a74c-eb70221f834d req-7f15f9ba-bfbc-4911-9b45-f8db71a77c03 service nova] Acquiring lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.965137] env[65758]: DEBUG oslo_concurrency.lockutils [req-64f439eb-f2f2-4128-a74c-eb70221f834d req-7f15f9ba-bfbc-4911-9b45-f8db71a77c03 service nova] Lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.965349] env[65758]: DEBUG oslo_concurrency.lockutils [req-64f439eb-f2f2-4128-a74c-eb70221f834d req-7f15f9ba-bfbc-4911-9b45-f8db71a77c03 service nova] Lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.965565] env[65758]: DEBUG nova.compute.manager [req-64f439eb-f2f2-4128-a74c-eb70221f834d req-7f15f9ba-bfbc-4911-9b45-f8db71a77c03 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] No waiting events found dispatching network-vif-plugged-b15c4724-d64a-4321-8c27-5e337f8b9312 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1218.965793] env[65758]: WARNING nova.compute.manager [req-64f439eb-f2f2-4128-a74c-eb70221f834d req-7f15f9ba-bfbc-4911-9b45-f8db71a77c03 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Received unexpected event network-vif-plugged-b15c4724-d64a-4321-8c27-5e337f8b9312 for instance with vm_state building and task_state spawning. [ 1219.059272] env[65758]: DEBUG nova.network.neutron [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Successfully updated port: b15c4724-d64a-4321-8c27-5e337f8b9312 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1219.148851] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661411, 'name': PowerOffVM_Task, 'duration_secs': 0.337284} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.148851] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1219.148851] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789c69bf-5ab3-4681-ab66-61e4d4cad050 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.172979] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5292a6bb-cccf-400d-b8b0-202d17ba7bc2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.212690] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1219.213047] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4f27e07-b180-421a-967b-310c2253f1f5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.222569] env[65758]: DEBUG nova.compute.manager [req-df8861ca-a50f-4fa0-b4a8-136d44263db3 req-16a892b9-126c-4527-83b3-425ed16aab7d service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Received event network-vif-deleted-b35c5d07-8b04-492b-961d-93f05229c072 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1219.222871] env[65758]: INFO nova.compute.manager [req-df8861ca-a50f-4fa0-b4a8-136d44263db3 req-16a892b9-126c-4527-83b3-425ed16aab7d service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Neutron deleted interface b35c5d07-8b04-492b-961d-93f05229c072; detaching it from the instance and deleting it from the info cache [ 1219.223155] env[65758]: DEBUG nova.network.neutron [req-df8861ca-a50f-4fa0-b4a8-136d44263db3 req-16a892b9-126c-4527-83b3-425ed16aab7d service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1219.226134] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1219.226134] env[65758]: value = "task-4661412" [ 1219.226134] env[65758]: _type = "Task" [ 1219.226134] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.242299] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1219.242539] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1219.242800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.242949] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1219.243164] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1219.243440] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c80da08-f89e-4962-9e8d-61c531907214 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.260056] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1219.260270] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1219.261066] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9be0775-4c36-4c5c-ade5-d2c39ff526c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.268464] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1219.268464] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52981d46-c132-cb69-2ec0-e357629226d5" [ 1219.268464] env[65758]: _type = "Task" [ 1219.268464] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.277744] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52981d46-c132-cb69-2ec0-e357629226d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.564652] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.564983] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1219.565197] env[65758]: DEBUG nova.network.neutron [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1219.678402] env[65758]: DEBUG nova.network.neutron [-] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1219.727676] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51ececf3-d10c-4614-afd4-671f10ebb81e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.738064] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6aade1-007e-4c70-8381-94ca5b10e645 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.773640] env[65758]: DEBUG nova.compute.manager [req-df8861ca-a50f-4fa0-b4a8-136d44263db3 req-16a892b9-126c-4527-83b3-425ed16aab7d service nova] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Detach interface failed, port_id=b35c5d07-8b04-492b-961d-93f05229c072, reason: Instance 
d0cadaac-07dd-4478-a83e-80ba46d103b2 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1219.784497] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52981d46-c132-cb69-2ec0-e357629226d5, 'name': SearchDatastore_Task, 'duration_secs': 0.014569} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.785618] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-442db0bd-387b-4164-b2f7-07cb66a8d924 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.791719] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1219.791719] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]524eee2f-6b1c-20a5-a1f8-48d88f85f325" [ 1219.791719] env[65758]: _type = "Task" [ 1219.791719] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.800958] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524eee2f-6b1c-20a5-a1f8-48d88f85f325, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.011836] env[65758]: DEBUG oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa4479-0e17-bb8b-c995-bb95a4e78b44/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1220.012943] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3988c1-71f8-4634-9014-044fb25149d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.019672] env[65758]: DEBUG oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa4479-0e17-bb8b-c995-bb95a4e78b44/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1220.019833] env[65758]: ERROR oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa4479-0e17-bb8b-c995-bb95a4e78b44/disk-0.vmdk due to incomplete transfer. 
[ 1220.020118] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f669b85e-8fde-4892-b092-31612add45b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.028418] env[65758]: DEBUG oslo_vmware.rw_handles [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa4479-0e17-bb8b-c995-bb95a4e78b44/disk-0.vmdk. {{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1220.028639] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Uploaded image 81bd8cd1-7783-4705-af00-19222b217ece to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1220.031643] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1220.032720] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-96dd0c08-7969-4a9c-9c4e-75333bdf7879 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.041428] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1220.041428] env[65758]: value = "task-4661413" [ 1220.041428] env[65758]: _type = "Task" [ 1220.041428] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.050485] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661413, 'name': Destroy_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.068877] env[65758]: WARNING openstack [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1220.069310] env[65758]: WARNING openstack [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1220.103951] env[65758]: DEBUG nova.network.neutron [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1220.116450] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.116718] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.116927] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.117126] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.117301] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.122997] env[65758]: INFO nova.compute.manager [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Terminating instance [ 1220.175489] env[65758]: WARNING neutronclient.v2_0.client [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1220.176193] env[65758]: WARNING openstack [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1220.176557] env[65758]: WARNING openstack [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1220.184447] env[65758]: INFO nova.compute.manager [-] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Took 1.27 seconds to deallocate network for instance. 
[ 1220.269755] env[65758]: DEBUG nova.network.neutron [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Updating instance_info_cache with network_info: [{"id": "b15c4724-d64a-4321-8c27-5e337f8b9312", "address": "fa:16:3e:73:99:0a", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15c4724-d6", "ovs_interfaceid": "b15c4724-d64a-4321-8c27-5e337f8b9312", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1220.308928] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]524eee2f-6b1c-20a5-a1f8-48d88f85f325, 'name': SearchDatastore_Task, 'duration_secs': 0.015162} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.309351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.309759] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. 
{{(pid=65758) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1220.310065] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a93518c0-15fb-4863-833c-3a253bbad5a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.319687] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1220.319687] env[65758]: value = "task-4661414" [ 1220.319687] env[65758]: _type = "Task" [ 1220.319687] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.329576] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661414, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.554240] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661413, 'name': Destroy_Task, 'duration_secs': 0.351615} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.554531] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Destroyed the VM [ 1220.554771] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1220.555136] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-70f9aad1-e7a2-4794-b6e3-6aabd4f5745a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.563862] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1220.563862] env[65758]: value = "task-4661415" [ 1220.563862] env[65758]: _type = "Task" [ 1220.563862] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.573354] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661415, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.630176] env[65758]: DEBUG nova.compute.manager [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1220.630534] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1220.630914] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7670dcbb-4e98-4c49-a4ee-55c0eebfc5db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.640498] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1220.640498] env[65758]: value = "task-4661416" [ 1220.640498] env[65758]: _type = "Task" [ 1220.640498] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.651543] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661416, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.692226] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.692577] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.692815] env[65758]: DEBUG nova.objects.instance [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lazy-loading 'resources' on Instance uuid d0cadaac-07dd-4478-a83e-80ba46d103b2 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.773461] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.773887] env[65758]: DEBUG nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Instance network_info: |[{"id": "b15c4724-d64a-4321-8c27-5e337f8b9312", "address": "fa:16:3e:73:99:0a", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15c4724-d6", "ovs_interfaceid": "b15c4724-d64a-4321-8c27-5e337f8b9312", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1220.774433] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:99:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd22cb4ec-277f-41ee-8aba-b3d54442b93d', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b15c4724-d64a-4321-8c27-5e337f8b9312', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1220.783590] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Creating folder: Project (0a8729d781b1450e9b366785f96f9938). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1220.783926] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18d48f9a-e85b-45b9-956c-2c3d993bc819 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.797139] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Created folder: Project (0a8729d781b1450e9b366785f96f9938) in parent group-v909763. [ 1220.797353] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Creating folder: Instances. Parent ref: group-v910081. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1220.797601] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a82a3e8e-79c8-47a5-9b41-6f5f1466501e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.809018] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Created folder: Instances in parent group-v910081. [ 1220.809359] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1220.809583] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1220.809875] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6516d3d1-458c-4702-86a2-cd2e409e7a15 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.836657] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455628} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.838129] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. [ 1220.838432] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1220.838432] env[65758]: value = "task-4661419" [ 1220.838432] env[65758]: _type = "Task" [ 1220.838432] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.839179] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5b3fc5-3f39-4646-8244-48c879bdfd74 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.851569] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661419, 'name': CreateVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.873387] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1220.873772] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2491ddec-50a9-4a50-8bf5-429ac702217a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.896016] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1220.896016] env[65758]: value = "task-4661420" [ 1220.896016] env[65758]: _type = "Task" [ 1220.896016] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.906847] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661420, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.992472] env[65758]: DEBUG nova.compute.manager [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Received event network-changed-b15c4724-d64a-4321-8c27-5e337f8b9312 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1220.992791] env[65758]: DEBUG nova.compute.manager [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Refreshing instance network info cache due to event network-changed-b15c4724-d64a-4321-8c27-5e337f8b9312. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1220.993388] env[65758]: DEBUG oslo_concurrency.lockutils [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] Acquiring lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.993488] env[65758]: DEBUG oslo_concurrency.lockutils [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] Acquired lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.993722] env[65758]: DEBUG nova.network.neutron [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Refreshing network info cache for port b15c4724-d64a-4321-8c27-5e337f8b9312 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1221.078750] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661415, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.152115] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661416, 'name': PowerOffVM_Task, 'duration_secs': 0.204707} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.152395] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1221.152595] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Volume detach. 
Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1221.152780] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910066', 'volume_id': '760cbc35-0376-4e51-a795-3bea9254770b', 'name': 'volume-760cbc35-0376-4e51-a795-3bea9254770b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'ba16e0fe-6748-4d14-bb28-a65d63a2274d', 'attached_at': '2025-11-21T13:24:27.000000', 'detached_at': '', 'volume_id': '760cbc35-0376-4e51-a795-3bea9254770b', 'serial': '760cbc35-0376-4e51-a795-3bea9254770b'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1221.153733] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185053bf-b0b3-41dc-ab73-6c506dd155b3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.177550] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c89974-5499-452e-ad6f-b46eb3ee4b57 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.185749] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8045d380-6e7c-4823-b429-10fabba66ad8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.209966] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad13acc-c366-467c-ad80-4bc8f7a06ae3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.225351] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The volume has not been displaced from its original location: [datastore2] volume-760cbc35-0376-4e51-a795-3bea9254770b/volume-760cbc35-0376-4e51-a795-3bea9254770b.vmdk. No consolidation needed. 
{{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1221.230500] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1221.233303] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82fb0a33-0d8d-4194-ab75-b05179050461 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.252318] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1221.252318] env[65758]: value = "task-4661421" [ 1221.252318] env[65758]: _type = "Task" [ 1221.252318] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.263803] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661421, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.351874] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661419, 'name': CreateVM_Task, 'duration_secs': 0.366459} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.352916] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1221.353687] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3ff018-d9c0-4c51-a4e7-1699fd85d114 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.356434] env[65758]: WARNING neutronclient.v2_0.client [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
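The recurring "Disabling service 'block-storage' / 'key-manager'" warnings are openstacksdk reporting an oslo.config lookup failure: it asks for valid_interfaces in the [cinder] and [barbican] groups, the option is not registered there, NoSuchOptError is raised, and the service is skipped. A small self-contained sketch that reproduces the same error type (the group and option names mirror the log; the ConfigOpts instance is illustrative):

# Sketch reproducing the NoSuchOptError behind the recurring
# "Disabling service ..." warnings: the option was never registered
# in that group, so the lookup fails.
from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))
conf([])  # process an empty command line
try:
    conf.cinder.valid_interfaces  # not registered in [cinder]
except cfg.NoSuchOptError as exc:
    print(exc)  # "no such option valid_interfaces in group [cinder]"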
[ 1221.356828] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.356990] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.357313] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1221.357603] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19af9865-e798-4e0b-b7f6-47f202289864 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.365174] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99401f3-02b0-4a40-a180-1987f3026a65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.368655] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1221.368655] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e29b00-b356-0142-d514-84752281d8dc" [ 1221.368655] env[65758]: _type = "Task" [ 1221.368655] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.406428] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae18877-3e75-46c8-be97-41f5a7117c61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.409287] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e29b00-b356-0142-d514-84752281d8dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009852} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.409519] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.409745] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1221.409964] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.410118] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.410294] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1221.411166] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-651c4594-138a-4e37-89f0-c2b0566341b3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.418426] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661420, 'name': ReconfigVM_Task, 'duration_secs': 0.305143} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.419646] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff00a47-711a-45ae-a73c-ab0c7fb3911f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.424114] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Reconfigured VM instance instance-00000071 to attach disk [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1221.425896] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7c0ae5-26bc-4f45-9b0b-c0ba3c2cd06c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.429292] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1221.429469] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1221.430519] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65c7270e-ab91-41bf-973b-9630490fc9cc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.441546] env[65758]: DEBUG nova.compute.provider_tree [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.467989] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1221.467989] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528bedef-6944-a5ea-698a-c944d1bcb276" [ 1221.467989] env[65758]: _type = "Task" [ 1221.467989] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.469063] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51818d59-dd36-4b2f-a756-e492f81ecc70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.490091] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528bedef-6944-a5ea-698a-c944d1bcb276, 'name': SearchDatastore_Task, 'duration_secs': 0.010747} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.492021] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1221.492021] env[65758]: value = "task-4661422" [ 1221.492021] env[65758]: _type = "Task" [ 1221.492021] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.492287] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b671bf20-c4f5-4e24-b2cc-a56c166539e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.499740] env[65758]: WARNING neutronclient.v2_0.client [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1221.500433] env[65758]: WARNING openstack [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1221.500839] env[65758]: WARNING openstack [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1221.510263] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1221.510263] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ad655e-425a-2c54-d628-ec4bda70d884" [ 1221.510263] env[65758]: _type = "Task" [ 1221.510263] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.517181] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661422, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.523468] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ad655e-425a-2c54-d628-ec4bda70d884, 'name': SearchDatastore_Task, 'duration_secs': 0.009623} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.523688] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.523928] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25/0f3ae822-4c4c-4dff-94d4-3416187d6d25.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1221.524597] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48a9dc61-bea8-4ab9-9ecd-9f85f00fa287 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.533206] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1221.533206] env[65758]: value = "task-4661423" [ 1221.533206] env[65758]: _type = "Task" [ 1221.533206] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.542357] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.580830] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661415, 'name': RemoveSnapshot_Task, 'duration_secs': 0.544045} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.581318] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1221.581637] env[65758]: DEBUG nova.compute.manager [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1221.588207] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef830f3-b2ad-41d4-bffb-a8aecf0f5552 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.656591] env[65758]: WARNING neutronclient.v2_0.client [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1221.657449] env[65758]: WARNING openstack [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1221.657841] env[65758]: WARNING openstack [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1221.745842] env[65758]: DEBUG nova.network.neutron [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Updated VIF entry in instance network info cache for port b15c4724-d64a-4321-8c27-5e337f8b9312. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1221.746366] env[65758]: DEBUG nova.network.neutron [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Updating instance_info_cache with network_info: [{"id": "b15c4724-d64a-4321-8c27-5e337f8b9312", "address": "fa:16:3e:73:99:0a", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15c4724-d6", "ovs_interfaceid": "b15c4724-d64a-4321-8c27-5e337f8b9312", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1221.765293] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661421, 'name': ReconfigVM_Task, 'duration_secs': 0.347812} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.765623] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1221.771228] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68a0b729-09d5-46ec-8e70-36687c0d2c0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.789140] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1221.789140] env[65758]: value = "task-4661424" [ 1221.789140] env[65758]: _type = "Task" [ 1221.789140] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.945495] env[65758]: DEBUG nova.scheduler.client.report [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1222.007228] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661422, 'name': ReconfigVM_Task, 'duration_secs': 0.297878} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.007478] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1222.007753] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-714f1a9f-77a2-4728-b5b9-379891d65cb3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.015688] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1222.015688] env[65758]: value = "task-4661425" [ 1222.015688] env[65758]: _type = "Task" [ 1222.015688] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.025314] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661425, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.045970] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661423, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490407} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.046250] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25/0f3ae822-4c4c-4dff-94d4-3416187d6d25.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1222.046460] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1222.046717] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b97abe4-60dd-4155-97c5-d30577302c66 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.053518] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1222.053518] env[65758]: value = "task-4661426" [ 1222.053518] env[65758]: _type = "Task" [ 1222.053518] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.061280] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661426, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.102190] env[65758]: INFO nova.compute.manager [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Shelve offloading [ 1222.249685] env[65758]: DEBUG oslo_concurrency.lockutils [req-c5703633-2500-4bf6-b7c3-673fe0a448d9 req-3273e243-68b1-46f8-a600-b86c50e32744 service nova] Releasing lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.300816] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661424, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.451664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.469840] env[65758]: INFO nova.scheduler.client.report [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted allocations for instance d0cadaac-07dd-4478-a83e-80ba46d103b2 [ 1222.530090] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661425, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.566029] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661426, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.605689] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1222.606105] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef5d1d4b-e012-441f-8fa7-060713866be2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.615306] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1222.615306] env[65758]: value = "task-4661427" [ 1222.615306] env[65758]: _type = "Task" [ 1222.615306] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.627184] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1222.627184] env[65758]: DEBUG nova.compute.manager [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1222.627932] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81576ee7-25fa-480a-bbef-4c53e6adffae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.634799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.634902] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.635120] env[65758]: DEBUG nova.network.neutron [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1222.801085] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661424, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.977878] env[65758]: DEBUG oslo_concurrency.lockutils [None req-7ca84c7d-f42f-456f-a4e1-eb2a62a6cd58 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "d0cadaac-07dd-4478-a83e-80ba46d103b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.697s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1223.027870] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661425, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.064838] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661426, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.7984} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.065165] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1223.066059] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a8e809-19be-405c-9d4a-0495e3f850d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.089555] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25/0f3ae822-4c4c-4dff-94d4-3416187d6d25.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1223.089857] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d18b436c-8ae2-41fa-8df5-39e24ad3b092 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.110943] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1223.110943] env[65758]: value = "task-4661428" [ 1223.110943] env[65758]: _type = "Task" [ 1223.110943] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.120106] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661428, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.138574] env[65758]: WARNING neutronclient.v2_0.client [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1223.139361] env[65758]: WARNING openstack [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1223.139725] env[65758]: WARNING openstack [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1223.303608] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661424, 'name': ReconfigVM_Task, 'duration_secs': 1.125126} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.303608] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910066', 'volume_id': '760cbc35-0376-4e51-a795-3bea9254770b', 'name': 'volume-760cbc35-0376-4e51-a795-3bea9254770b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'ba16e0fe-6748-4d14-bb28-a65d63a2274d', 'attached_at': '2025-11-21T13:24:27.000000', 'detached_at': '', 'volume_id': '760cbc35-0376-4e51-a795-3bea9254770b', 'serial': '760cbc35-0376-4e51-a795-3bea9254770b'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1223.303608] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1223.303911] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92c070a-28fd-4c3d-b73a-7c64b2e018ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.312336] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1223.312627] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92305686-4753-49f6-b49a-f84cdad90bac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.327830] env[65758]: WARNING 
neutronclient.v2_0.client [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1223.328606] env[65758]: WARNING openstack [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1223.329018] env[65758]: WARNING openstack [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1223.392722] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1223.393072] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1223.393396] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleting the datastore file [datastore1] ba16e0fe-6748-4d14-bb28-a65d63a2274d {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1223.393792] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb4e4ada-3228-4fb3-9554-9cb25eeaaa13 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.402556] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1223.402556] env[65758]: value = "task-4661430" [ 1223.402556] env[65758]: _type = "Task" [ 1223.402556] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.411901] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661430, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.417967] env[65758]: DEBUG nova.network.neutron [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [{"id": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "address": "fa:16:3e:9d:65:b5", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dcfa87-c0", "ovs_interfaceid": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1223.529665] env[65758]: DEBUG oslo_vmware.api [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661425, 'name': PowerOnVM_Task, 'duration_secs': 1.127206} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.529977] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1223.532787] env[65758]: DEBUG nova.compute.manager [None req-f907762f-b470-474c-8dfa-333123991e57 tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1223.533676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79faf4dc-7e7f-4cda-86d7-81c23f81019b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.620918] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661428, 'name': ReconfigVM_Task, 'duration_secs': 0.326022} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.621275] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25/0f3ae822-4c4c-4dff-94d4-3416187d6d25.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.621834] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c3e2017-12dc-4e3a-ad9c-220c9f59159b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.629224] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1223.629224] env[65758]: value = "task-4661431" [ 1223.629224] env[65758]: _type = "Task" [ 1223.629224] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.640623] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661431, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.913473] env[65758]: DEBUG oslo_vmware.api [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159123} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.913756] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.913942] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.914130] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.914304] env[65758]: INFO nova.compute.manager [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Took 3.28 seconds to destroy the instance on the hypervisor. 
[ 1223.914540] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1223.914734] env[65758]: DEBUG nova.compute.manager [-] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1223.914830] env[65758]: DEBUG nova.network.neutron [-] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1223.915109] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1223.915641] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1223.915893] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1223.922663] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.922987] env[65758]: WARNING neutronclient.v2_0.client [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1223.923562] env[65758]: WARNING openstack [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1223.923902] env[65758]: WARNING openstack [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1223.929024] env[65758]: WARNING neutronclient.v2_0.client [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1223.962039] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1224.140267] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661431, 'name': Rename_Task, 'duration_secs': 0.153737} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.140618] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1224.140836] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27b1c06e-596c-4e10-b6c7-608e62b8bf66 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.148675] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1224.148675] env[65758]: value = "task-4661432" [ 1224.148675] env[65758]: _type = "Task" [ 1224.148675] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.163513] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661432, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.218605] env[65758]: DEBUG nova.compute.manager [req-ee7e2bb4-fc4c-4c9e-bfb3-7368cf624abc req-28d9fa6a-55a6-4b9a-8f4a-4596c382dbac service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received event network-vif-unplugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1224.218834] env[65758]: DEBUG oslo_concurrency.lockutils [req-ee7e2bb4-fc4c-4c9e-bfb3-7368cf624abc req-28d9fa6a-55a6-4b9a-8f4a-4596c382dbac service nova] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.219057] env[65758]: DEBUG oslo_concurrency.lockutils [req-ee7e2bb4-fc4c-4c9e-bfb3-7368cf624abc req-28d9fa6a-55a6-4b9a-8f4a-4596c382dbac service nova] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.219242] env[65758]: DEBUG oslo_concurrency.lockutils [req-ee7e2bb4-fc4c-4c9e-bfb3-7368cf624abc req-28d9fa6a-55a6-4b9a-8f4a-4596c382dbac service nova] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.219404] env[65758]: DEBUG nova.compute.manager [req-ee7e2bb4-fc4c-4c9e-bfb3-7368cf624abc req-28d9fa6a-55a6-4b9a-8f4a-4596c382dbac service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] No waiting events found dispatching network-vif-unplugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1224.219570] env[65758]: WARNING nova.compute.manager [req-ee7e2bb4-fc4c-4c9e-bfb3-7368cf624abc req-28d9fa6a-55a6-4b9a-8f4a-4596c382dbac service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received unexpected event network-vif-unplugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b for instance with vm_state shelved and task_state shelving_offloading. 
[ 1224.316324] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1224.317446] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6a7a05-583a-4d99-8129-c40da5dc8d9f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.326128] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1224.326889] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c08ed28d-7d41-4803-b232-c6793b66b245 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.394802] env[65758]: DEBUG nova.compute.manager [req-0c74411b-54ba-4fc9-aaeb-ff9cb4135142 req-f0fb8a56-0e41-4433-8b68-db907bd1c2e5 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Received event network-vif-deleted-b91df992-11ae-4d37-af24-380860864b45 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1224.394853] env[65758]: INFO nova.compute.manager [req-0c74411b-54ba-4fc9-aaeb-ff9cb4135142 req-f0fb8a56-0e41-4433-8b68-db907bd1c2e5 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Neutron deleted interface b91df992-11ae-4d37-af24-380860864b45; detaching it from the instance and deleting it from the info cache [ 1224.395007] env[65758]: DEBUG nova.network.neutron [req-0c74411b-54ba-4fc9-aaeb-ff9cb4135142 req-f0fb8a56-0e41-4433-8b68-db907bd1c2e5 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1224.398137] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1224.398269] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1224.398505] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleting the datastore file [datastore2] bc10286b-195f-48a2-b16c-f8f925ec7a2a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1224.399015] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-736fbfed-0f64-40c5-b357-899f15e95042 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.407040] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1224.407040] env[65758]: value = "task-4661434" [ 1224.407040] env[65758]: _type = "Task" [ 1224.407040] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.417257] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661434, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.576736] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.576736] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.661737] env[65758]: DEBUG oslo_vmware.api [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661432, 'name': PowerOnVM_Task, 'duration_secs': 0.487914} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.662035] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1224.662238] env[65758]: INFO nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Took 6.02 seconds to spawn the instance on the hypervisor. 
[ 1224.662420] env[65758]: DEBUG nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1224.663267] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0898e10e-5705-4ba7-a580-22c3ae9db601 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.873275] env[65758]: DEBUG nova.network.neutron [-] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1224.899850] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76441a3d-3dc4-4c73-92f3-c447781f6128 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.916114] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b908638-87ac-427a-b5c3-777f30597292 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.931619] env[65758]: DEBUG oslo_vmware.api [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150261} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.932361] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1224.932561] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1224.932753] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1224.952360] env[65758]: DEBUG nova.compute.manager [req-0c74411b-54ba-4fc9-aaeb-ff9cb4135142 req-f0fb8a56-0e41-4433-8b68-db907bd1c2e5 service nova] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Detach interface failed, port_id=b91df992-11ae-4d37-af24-380860864b45, reason: Instance ba16e0fe-6748-4d14-bb28-a65d63a2274d could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1224.954639] env[65758]: INFO nova.scheduler.client.report [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted allocations for instance bc10286b-195f-48a2-b16c-f8f925ec7a2a [ 1225.079172] env[65758]: DEBUG nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1225.184241] env[65758]: INFO nova.compute.manager [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Took 10.86 seconds to build instance. [ 1225.376207] env[65758]: INFO nova.compute.manager [-] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Took 1.46 seconds to deallocate network for instance. [ 1225.461817] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.461878] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.462175] env[65758]: DEBUG nova.objects.instance [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'resources' on Instance uuid bc10286b-195f-48a2-b16c-f8f925ec7a2a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1225.588366] env[65758]: INFO nova.compute.manager [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Rescuing [ 1225.588631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.588828] env[65758]: DEBUG oslo_concurrency.lockutils [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.589041] env[65758]: DEBUG nova.network.neutron [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 
tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1225.604391] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.686549] env[65758]: DEBUG oslo_concurrency.lockutils [None req-71ea25bb-2cd5-40b1-8cd9-081cee4bb496 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.374s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.926901] env[65758]: INFO nova.compute.manager [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Took 0.55 seconds to detach 1 volumes for instance. [ 1225.965772] env[65758]: DEBUG nova.objects.instance [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'numa_topology' on Instance uuid bc10286b-195f-48a2-b16c-f8f925ec7a2a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.092995] env[65758]: WARNING neutronclient.v2_0.client [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1226.093409] env[65758]: WARNING openstack [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1226.093761] env[65758]: WARNING openstack [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1226.247188] env[65758]: WARNING neutronclient.v2_0.client [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1226.247944] env[65758]: WARNING openstack [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1226.248342] env[65758]: WARNING openstack [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1226.339707] env[65758]: DEBUG nova.network.neutron [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Updating instance_info_cache with network_info: [{"id": "b15c4724-d64a-4321-8c27-5e337f8b9312", "address": "fa:16:3e:73:99:0a", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15c4724-d6", "ovs_interfaceid": "b15c4724-d64a-4321-8c27-5e337f8b9312", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1226.432853] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.468263] env[65758]: DEBUG nova.objects.base [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1226.583464] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a68f35-e6b2-4ff2-a57e-7a4cfcaf317b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.591767] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c5c310-9bb6-432a-bd67-93d19183187a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.624108] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0277f385-9420-4adf-a21f-6f3de735cc05 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.632543] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea42301-2291-4bd4-acdd-f749269a58f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.646262] env[65758]: DEBUG nova.compute.provider_tree [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.810239] env[65758]: DEBUG nova.compute.manager [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received event network-changed-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1226.810510] env[65758]: DEBUG nova.compute.manager [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Refreshing instance network info cache due to event network-changed-f3dcfa87-c097-4b94-bab6-e9fd7455605b. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1226.810722] env[65758]: DEBUG oslo_concurrency.lockutils [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Acquiring lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.810904] env[65758]: DEBUG oslo_concurrency.lockutils [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Acquired lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.811143] env[65758]: DEBUG nova.network.neutron [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Refreshing network info cache for port f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1226.826868] env[65758]: DEBUG nova.compute.manager [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Received event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1226.826946] env[65758]: DEBUG nova.compute.manager [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing instance network info cache due to event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1226.827765] env[65758]: DEBUG oslo_concurrency.lockutils [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] Acquiring lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.827765] env[65758]: DEBUG oslo_concurrency.lockutils [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] Acquired lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.827765] env[65758]: DEBUG nova.network.neutron [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1226.843184] env[65758]: DEBUG oslo_concurrency.lockutils [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "refresh_cache-0f3ae822-4c4c-4dff-94d4-3416187d6d25" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.150112] env[65758]: DEBUG nova.scheduler.client.report [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1227.314093] env[65758]: WARNING neutronclient.v2_0.client [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1227.314971] env[65758]: WARNING openstack [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1227.315805] env[65758]: WARNING openstack [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1227.330355] env[65758]: WARNING neutronclient.v2_0.client [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1227.330996] env[65758]: WARNING openstack [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1227.331365] env[65758]: WARNING openstack [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1227.501918] env[65758]: WARNING neutronclient.v2_0.client [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1227.502838] env[65758]: WARNING openstack [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1227.503173] env[65758]: WARNING openstack [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1227.542525] env[65758]: WARNING neutronclient.v2_0.client [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1227.543241] env[65758]: WARNING openstack [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1227.543593] env[65758]: WARNING openstack [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1227.598387] env[65758]: DEBUG nova.network.neutron [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updated VIF entry in instance network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1227.598849] env[65758]: DEBUG nova.network.neutron [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [{"id": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "address": "fa:16:3e:5a:2e:66", "network": {"id": "4cc217d8-93be-4512-ae15-e75bcfb83095", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-70005608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f3dc40c4af744624b6c320390d0cb210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d310661-ff", "ovs_interfaceid": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1227.636631] env[65758]: DEBUG nova.network.neutron [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updated VIF entry in instance network info cache for port f3dcfa87-c097-4b94-bab6-e9fd7455605b. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1227.637064] env[65758]: DEBUG nova.network.neutron [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [{"id": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "address": "fa:16:3e:9d:65:b5", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf3dcfa87-c0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1227.654590] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.657129] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.053s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.658571] env[65758]: INFO nova.compute.claims [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1228.101971] env[65758]: DEBUG oslo_concurrency.lockutils [req-611f99bb-c238-4f03-a790-a997d4a1eabd req-cbb1b40b-0763-428b-9a60-bc800ef18213 service nova] Releasing lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.107950] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.139624] env[65758]: DEBUG oslo_concurrency.lockutils [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] 
Releasing lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.139960] env[65758]: DEBUG nova.compute.manager [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Received event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1228.140146] env[65758]: DEBUG nova.compute.manager [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing instance network info cache due to event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1228.140358] env[65758]: DEBUG oslo_concurrency.lockutils [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Acquiring lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.140493] env[65758]: DEBUG oslo_concurrency.lockutils [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Acquired lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.140709] env[65758]: DEBUG nova.network.neutron [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1228.168076] env[65758]: DEBUG oslo_concurrency.lockutils [None req-685fffe1-0c6c-4e52-a285-16615b6e6245 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.255s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.168076] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.060s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.168490] env[65758]: INFO nova.compute.manager [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Unshelving [ 1228.386396] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1228.386762] env[65758]: DEBUG oslo_vmware.service 
[-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72a80037-5a13-4123-989f-584b277ec7b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.395197] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1228.395197] env[65758]: value = "task-4661435" [ 1228.395197] env[65758]: _type = "Task" [ 1228.395197] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.404639] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661435, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.643690] env[65758]: WARNING neutronclient.v2_0.client [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1228.644398] env[65758]: WARNING openstack [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1228.644755] env[65758]: WARNING openstack [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1228.732404] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "d1918f85-d122-4a84-88b3-f038e8c1149e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.732651] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "d1918f85-d122-4a84-88b3-f038e8c1149e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.732843] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "d1918f85-d122-4a84-88b3-f038e8c1149e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.733027] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "d1918f85-d122-4a84-88b3-f038e8c1149e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.733200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "d1918f85-d122-4a84-88b3-f038e8c1149e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.735200] env[65758]: INFO nova.compute.manager [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Terminating instance [ 1228.783035] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbded8d-376e-4b0c-a6d8-0bee23f2749e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.787434] env[65758]: WARNING neutronclient.v2_0.client [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1228.788093] env[65758]: WARNING openstack [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1228.788502] env[65758]: WARNING openstack [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1228.801046] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59edf04a-dd1e-4981-96d1-c45399c078c5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.841508] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c76c7ac-4723-4103-9357-426763fb86ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.844426] env[65758]: DEBUG nova.compute.manager [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Received event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1228.844426] env[65758]: DEBUG nova.compute.manager [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing instance network info cache due to event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1228.844426] env[65758]: DEBUG oslo_concurrency.lockutils [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Acquiring lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.851066] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4e15e3-21f3-41d4-ba7d-f8c71cc2eece {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.866366] env[65758]: DEBUG nova.compute.provider_tree [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.886562] env[65758]: DEBUG nova.network.neutron [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updated VIF entry in instance network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1228.886979] env[65758]: DEBUG nova.network.neutron [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [{"id": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "address": "fa:16:3e:5a:2e:66", "network": {"id": "4cc217d8-93be-4512-ae15-e75bcfb83095", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-70005608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f3dc40c4af744624b6c320390d0cb210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d310661-ff", "ovs_interfaceid": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1228.906749] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661435, 'name': PowerOffVM_Task, 'duration_secs': 0.27133} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.907045] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1228.907893] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e657bce3-ec74-44e4-9955-23aef4050ed7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.932164] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ea886d-c2d2-4884-b8bd-6504f058d902 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.967038] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1228.967381] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba0517d3-91c5-4216-80b7-c12a181583bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.975317] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1228.975317] env[65758]: value = "task-4661436" [ 1228.975317] env[65758]: _type = "Task" [ 1228.975317] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.985122] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661436, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.197763] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.242250] env[65758]: DEBUG nova.compute.manager [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1229.242540] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1229.243486] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d218727-428f-4f9d-870f-dd8c55d4521c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.252320] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1229.252628] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0dc2711-4359-4a30-99dd-8b8ad8350668 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.261501] env[65758]: DEBUG oslo_vmware.api [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1229.261501] env[65758]: value = "task-4661437" [ 1229.261501] env[65758]: _type = "Task" [ 1229.261501] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.272104] env[65758]: DEBUG oslo_vmware.api [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661437, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.369887] env[65758]: DEBUG nova.scheduler.client.report [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1229.389867] env[65758]: DEBUG oslo_concurrency.lockutils [req-45a6c7c4-a380-4d23-8f2a-5c4e8c84edc5 req-508b39d3-4056-431a-9b1f-185c21e46d21 service nova] Releasing lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.390369] env[65758]: DEBUG oslo_concurrency.lockutils [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Acquired lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.390556] env[65758]: DEBUG nova.network.neutron [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1229.435824] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "1f773924-74ee-4151-81ba-d105ce225289" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.436196] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "1f773924-74ee-4151-81ba-d105ce225289" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.436436] env[65758]: DEBUG nova.compute.manager [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1229.437438] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e829af1-a76f-4011-9162-b9242c67839e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.446226] env[65758]: DEBUG nova.compute.manager [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 
tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1229.446819] env[65758]: DEBUG nova.objects.instance [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'flavor' on Instance uuid 1f773924-74ee-4151-81ba-d105ce225289 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.487027] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1229.487294] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1229.487567] env[65758]: DEBUG oslo_concurrency.lockutils [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.487750] env[65758]: DEBUG oslo_concurrency.lockutils [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.487967] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1229.488275] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccfca1f0-4c41-47c0-bd2e-c188452c8411 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.498658] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1229.498852] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1229.499590] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd35af58-747d-480e-834d-8ca8b2ccbe94 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.506053] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1229.506053] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5279923d-e80d-9c1f-e37f-8d012c924e93" [ 1229.506053] env[65758]: _type = "Task" [ 1229.506053] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.514831] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5279923d-e80d-9c1f-e37f-8d012c924e93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.771948] env[65758]: DEBUG oslo_vmware.api [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661437, 'name': PowerOffVM_Task, 'duration_secs': 0.207577} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.772213] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1229.772398] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1229.772741] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a204288-45f3-4b32-b19d-182187e24608 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.832712] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1229.832943] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1229.833171] env[65758]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Deleting the datastore file [datastore2] d1918f85-d122-4a84-88b3-f038e8c1149e {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1229.833462] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbcae13f-3bcf-487d-a319-8568feedea74 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.840341] env[65758]: DEBUG oslo_vmware.api [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for the task: (returnval){ [ 1229.840341] env[65758]: value = "task-4661439" [ 1229.840341] env[65758]: _type = "Task" [ 1229.840341] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.848509] env[65758]: DEBUG oslo_vmware.api [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661439, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.875789] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.876354] env[65758]: DEBUG nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1229.879127] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.447s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.879818] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.881423] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.684s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.881725] env[65758]: DEBUG nova.objects.instance [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'pci_requests' on Instance uuid bc10286b-195f-48a2-b16c-f8f925ec7a2a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.893629] env[65758]: WARNING neutronclient.v2_0.client [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1229.894402] env[65758]: WARNING openstack [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1229.894779] env[65758]: WARNING openstack [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1229.903443] env[65758]: INFO nova.scheduler.client.report [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted allocations for instance ba16e0fe-6748-4d14-bb28-a65d63a2274d [ 1230.021206] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5279923d-e80d-9c1f-e37f-8d012c924e93, 'name': SearchDatastore_Task, 'duration_secs': 0.021827} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.022055] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36615108-325e-4b87-8d0c-a63bbb19771a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.028493] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1230.028493] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5228fef6-ca81-bad0-2d3e-9d6461e8db21" [ 1230.028493] env[65758]: _type = "Task" [ 1230.028493] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.038897] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5228fef6-ca81-bad0-2d3e-9d6461e8db21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.057693] env[65758]: WARNING neutronclient.v2_0.client [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1230.058417] env[65758]: WARNING openstack [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1230.058762] env[65758]: WARNING openstack [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1230.153364] env[65758]: DEBUG nova.network.neutron [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updated VIF entry in instance network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1230.153716] env[65758]: DEBUG nova.network.neutron [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [{"id": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "address": "fa:16:3e:5a:2e:66", "network": {"id": "4cc217d8-93be-4512-ae15-e75bcfb83095", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-70005608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f3dc40c4af744624b6c320390d0cb210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d310661-ff", "ovs_interfaceid": "3d310661-ff77-40cb-b141-66ffbfd71a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1230.350991] env[65758]: DEBUG oslo_vmware.api [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Task: {'id': task-4661439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306325} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.351299] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1230.351480] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1230.351654] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1230.351825] env[65758]: INFO nova.compute.manager [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1230.352091] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1230.352295] env[65758]: DEBUG nova.compute.manager [-] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1230.352392] env[65758]: DEBUG nova.network.neutron [-] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1230.352631] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1230.353189] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1230.353514] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1230.385679] env[65758]: DEBUG nova.compute.utils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1230.389638] env[65758]: DEBUG nova.objects.instance [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'numa_topology' on Instance uuid bc10286b-195f-48a2-b16c-f8f925ec7a2a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.392900] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1230.394891] env[65758]: DEBUG nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1230.395185] env[65758]: DEBUG nova.network.neutron [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1230.395742] env[65758]: WARNING neutronclient.v2_0.client [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1230.395742] env[65758]: WARNING neutronclient.v2_0.client [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1230.396314] env[65758]: WARNING openstack [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1230.397435] env[65758]: WARNING openstack [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1230.404531] env[65758]: INFO nova.compute.claims [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1230.411412] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6e2a3fce-5bdb-49c6-a775-2abc95a9cf59 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "ba16e0fe-6748-4d14-bb28-a65d63a2274d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.295s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.454491] env[65758]: DEBUG nova.policy [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc85d2d1d84f4df0b4de5e6388bb9398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82bfbb5ee6714c9aa5119cb714d28ce2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1230.456914] env[65758]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1230.457482] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a957e9de-080d-4e08-b46c-533e650c65a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.465604] env[65758]: DEBUG oslo_vmware.api [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1230.465604] env[65758]: value = "task-4661440" [ 1230.465604] env[65758]: _type = "Task" [ 1230.465604] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.474775] env[65758]: DEBUG oslo_vmware.api [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661440, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.539783] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5228fef6-ca81-bad0-2d3e-9d6461e8db21, 'name': SearchDatastore_Task, 'duration_secs': 0.0112} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.540192] env[65758]: DEBUG oslo_concurrency.lockutils [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.540435] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. {{(pid=65758) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1230.540767] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7022420c-d4bf-4d50-a63b-97f71e34f700 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.553754] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1230.553754] env[65758]: value = "task-4661441" [ 1230.553754] env[65758]: _type = "Task" [ 1230.553754] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.564105] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661441, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.657447] env[65758]: DEBUG oslo_concurrency.lockutils [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Releasing lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.659041] env[65758]: DEBUG nova.compute.manager [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Received event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1230.659041] env[65758]: DEBUG nova.compute.manager [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing instance network info cache due to event network-changed-3d310661-ff77-40cb-b141-66ffbfd71a3f. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1230.659041] env[65758]: DEBUG oslo_concurrency.lockutils [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Acquiring lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.659041] env[65758]: DEBUG oslo_concurrency.lockutils [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Acquired lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.659041] env[65758]: DEBUG nova.network.neutron [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Refreshing network info cache for port 3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1230.811818] env[65758]: DEBUG nova.network.neutron [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Successfully created port: c0fe8827-b903-4031-a8be-c5b8a66577af {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1230.892646] env[65758]: DEBUG nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1230.954319] env[65758]: DEBUG nova.compute.manager [req-7b9a5db1-b116-41fd-ae99-c97ad24b19d8 req-75f7fab2-4425-45a5-8c1a-9ea3b4975799 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Received event network-vif-deleted-3d310661-ff77-40cb-b141-66ffbfd71a3f {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1230.954319] env[65758]: INFO nova.compute.manager [req-7b9a5db1-b116-41fd-ae99-c97ad24b19d8 req-75f7fab2-4425-45a5-8c1a-9ea3b4975799 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Neutron deleted interface 3d310661-ff77-40cb-b141-66ffbfd71a3f; detaching it from the instance and deleting it from the info cache [ 1230.954540] env[65758]: DEBUG nova.network.neutron [req-7b9a5db1-b116-41fd-ae99-c97ad24b19d8 req-75f7fab2-4425-45a5-8c1a-9ea3b4975799 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1230.984891] env[65758]: DEBUG oslo_vmware.api [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661440, 'name': PowerOffVM_Task, 'duration_secs': 0.217121} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.984891] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1230.985449] env[65758]: DEBUG nova.compute.manager [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1230.986148] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b5a3f7-706f-4fcf-acfb-055a28c17c0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.065116] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661441, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505468} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.065423] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. 
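The PowerOffVM_Task and CopyVirtualDisk_Task entries above follow oslo.vmware's usual invoke-then-poll pattern: a vSphere task method is invoked through the API session, and wait_for_task() polls it (the "progress is 0%" and "completed successfully" records) until vCenter reports a terminal state. A minimal sketch of that pattern; the session and vm_ref names and the power_off helper are illustrative assumptions, not part of the log:

from oslo_vmware import exceptions as vmware_exceptions

def power_off(session, vm_ref):
    # Invoking the vSphere method returns a Task managed-object reference;
    # this is what the "Invoking VirtualMachine.PowerOffVM_Task" records show.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    try:
        # wait_for_task() polls the task, producing the periodic
        # "Task: {...} progress is N%" records, and returns on success.
        return session.wait_for_task(task)
    except vmware_exceptions.VimFaultException:
        # vCenter marked the task as failed; the caller decides how to recover.
        raise

The same shape applies to the rescue-disk copy above, with VirtualDiskManager.CopyVirtualDisk_Task invoked against datastore paths instead of a VM reference.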
[ 1231.066532] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5b30ca-98c6-4133-82e2-f413e667db45 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.093386] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1231.094058] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d55ce7f3-ccb6-4141-b326-f6018eba5b7d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.113962] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1231.113962] env[65758]: value = "task-4661442" [ 1231.113962] env[65758]: _type = "Task" [ 1231.113962] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.123489] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661442, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.123776] env[65758]: DEBUG nova.network.neutron [-] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1231.162998] env[65758]: WARNING neutronclient.v2_0.client [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1231.163745] env[65758]: WARNING openstack [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1231.164128] env[65758]: WARNING openstack [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1231.188718] env[65758]: INFO nova.network.neutron [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Port 3d310661-ff77-40cb-b141-66ffbfd71a3f from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1231.188973] env[65758]: DEBUG nova.network.neutron [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1231.460081] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f93c6559-c19c-44b0-bbbd-ce2b2da70d9d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.470576] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e54cd5-0ee7-4f09-97d6-b654e4874606 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.508874] env[65758]: DEBUG nova.compute.manager [req-7b9a5db1-b116-41fd-ae99-c97ad24b19d8 req-75f7fab2-4425-45a5-8c1a-9ea3b4975799 service nova] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Detach interface failed, port_id=3d310661-ff77-40cb-b141-66ffbfd71a3f, reason: Instance d1918f85-d122-4a84-88b3-f038e8c1149e could not be found. 
{{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1231.509602] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3998f5b0-dbae-4777-b05d-3bd988e9d31f tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "1f773924-74ee-4151-81ba-d105ce225289" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.073s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.562534] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e2f906-792a-4eb8-b0b7-c6faa4c7ede6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.569950] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.570203] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.576984] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57149c64-cbe2-4c73-89ec-c5eee271dcdc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.608072] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163f6e9f-85d4-4e27-bae0-9e1fe8507d83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.619610] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfe7428-c990-485b-bd43-e40083724377 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.626897] env[65758]: INFO nova.compute.manager [-] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Took 1.27 seconds to deallocate network for instance. [ 1231.638946] env[65758]: DEBUG nova.compute.provider_tree [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.640271] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661442, 'name': ReconfigVM_Task, 'duration_secs': 0.296687} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.643502] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.644558] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076eda69-ecc7-4ceb-8dd5-e38aa92ba8c8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.673283] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0528daf7-d0e7-4bee-8e70-19e30a5d1a9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.689933] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1231.689933] env[65758]: value = "task-4661443" [ 1231.689933] env[65758]: _type = "Task" [ 1231.689933] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.693728] env[65758]: DEBUG oslo_concurrency.lockutils [req-81f7fc21-e5e8-466f-a391-10db79b69027 req-0187d1e8-1788-4069-8839-ef625aaeb388 service nova] Releasing lock "refresh_cache-d1918f85-d122-4a84-88b3-f038e8c1149e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.699762] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661443, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.882520] env[65758]: DEBUG nova.objects.instance [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'flavor' on Instance uuid 1f773924-74ee-4151-81ba-d105ce225289 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1231.907279] env[65758]: DEBUG nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1231.937331] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1231.937593] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1231.937741] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1231.937918] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1231.938078] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1231.938224] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1231.938425] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1231.938578] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1231.938740] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 
tempest-ServersTestJSON-887760377-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1231.938896] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1231.939073] env[65758]: DEBUG nova.virt.hardware [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1231.940297] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cbddba-1fbe-4d19-841d-b00e30ffcbc3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.949511] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a711aeb6-c0fa-47ab-96ef-18702e087f70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.072407] env[65758]: DEBUG nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1232.144039] env[65758]: DEBUG nova.scheduler.client.report [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1232.148725] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.200332] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661443, 'name': ReconfigVM_Task, 'duration_secs': 0.155367} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.200612] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1232.200869] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c1971e0-b1e3-40b4-ba18-2694c2fd95b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.207930] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1232.207930] env[65758]: value = "task-4661444" [ 1232.207930] env[65758]: _type = "Task" [ 1232.207930] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.216408] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.300762] env[65758]: DEBUG nova.compute.manager [req-d21615d6-b040-469f-8c3b-bf84e4b301f4 req-fb2ce2bf-4206-41ea-b4c4-a462f6f1fda8 service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Received event network-vif-plugged-c0fe8827-b903-4031-a8be-c5b8a66577af {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1232.301012] env[65758]: DEBUG oslo_concurrency.lockutils [req-d21615d6-b040-469f-8c3b-bf84e4b301f4 req-fb2ce2bf-4206-41ea-b4c4-a462f6f1fda8 service nova] Acquiring lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.301320] env[65758]: DEBUG oslo_concurrency.lockutils [req-d21615d6-b040-469f-8c3b-bf84e4b301f4 req-fb2ce2bf-4206-41ea-b4c4-a462f6f1fda8 service nova] Lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.301524] env[65758]: DEBUG oslo_concurrency.lockutils [req-d21615d6-b040-469f-8c3b-bf84e4b301f4 req-fb2ce2bf-4206-41ea-b4c4-a462f6f1fda8 service nova] Lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.301729] env[65758]: DEBUG nova.compute.manager [req-d21615d6-b040-469f-8c3b-bf84e4b301f4 req-fb2ce2bf-4206-41ea-b4c4-a462f6f1fda8 service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] No waiting events found dispatching network-vif-plugged-c0fe8827-b903-4031-a8be-c5b8a66577af {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1232.301908] env[65758]: WARNING nova.compute.manager 
[req-d21615d6-b040-469f-8c3b-bf84e4b301f4 req-fb2ce2bf-4206-41ea-b4c4-a462f6f1fda8 service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Received unexpected event network-vif-plugged-c0fe8827-b903-4031-a8be-c5b8a66577af for instance with vm_state building and task_state spawning. [ 1232.388634] env[65758]: DEBUG oslo_concurrency.lockutils [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.388832] env[65758]: DEBUG oslo_concurrency.lockutils [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.389011] env[65758]: DEBUG nova.network.neutron [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1232.389197] env[65758]: DEBUG nova.objects.instance [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'info_cache' on Instance uuid 1f773924-74ee-4151-81ba-d105ce225289 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.394879] env[65758]: DEBUG nova.network.neutron [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Successfully updated port: c0fe8827-b903-4031-a8be-c5b8a66577af {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1232.600487] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.650468] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.769s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.650901] env[65758]: WARNING neutronclient.v2_0.client [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
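The "Acquiring lock", "acquired ... waited", and "released ... held" DEBUG records throughout this section come from oslo.concurrency's lockutils wrapper around the named critical sections (instance UUIDs, "compute_resources", "refresh_cache-<uuid>", and the per-instance "-events" locks). A minimal sketch of the two usual forms, assuming lock names taken from the log; update_usage and refresh_cache are illustrative stand-ins for the decorated code:

from oslo_concurrency import lockutils

# Decorator form: every call of the wrapped function is serialized on one
# named lock, which yields the "acquired ... :: waited" and
# "released ... :: held" timing lines seen above.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # critical section

# Context-manager form: same semantics for an inline critical section,
# e.g. the per-instance "refresh_cache-<uuid>" locks.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance's network info cache here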
[ 1232.653888] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.505s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.654182] env[65758]: DEBUG nova.objects.instance [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lazy-loading 'resources' on Instance uuid d1918f85-d122-4a84-88b3-f038e8c1149e {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.682849] env[65758]: INFO nova.network.neutron [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating port f3dcfa87-c097-4b94-bab6-e9fd7455605b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1232.722503] env[65758]: DEBUG oslo_vmware.api [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661444, 'name': PowerOnVM_Task, 'duration_secs': 0.420407} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.723522] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1232.727241] env[65758]: DEBUG nova.compute.manager [None req-81cb7f84-b900-4117-991d-d85defbb1323 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1232.728271] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbf916d-f0bf-4876-b0fb-9a6bc2de140b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.894686] env[65758]: DEBUG nova.objects.base [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Object Instance<1f773924-74ee-4151-81ba-d105ce225289> lazy-loaded attributes: flavor,info_cache {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1232.896413] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "refresh_cache-cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.896555] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "refresh_cache-cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.896722] env[65758]: DEBUG nova.network.neutron [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1233.272844] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfdff75-9f47-447b-bc60-feee31bab559 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.282137] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0d9ef7-8421-4c11-8e2b-c64fcbcc180a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.315263] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0932379f-252e-4bc6-bbd1-d6872fc9a69d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.324660] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b42c41-8365-4749-ab51-fae6d8a098a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.340431] env[65758]: DEBUG nova.compute.provider_tree [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.398604] env[65758]: WARNING neutronclient.v2_0.client [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
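The "Inventory has not changed" reports above carry the provider inventory that scheduling works from: VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, and DISK_GB total 200. Placement-style effective capacity per resource class is (total - reserved) * allocation_ratio; a minimal sketch of that arithmetic over the inventory exactly as logged (the capacity helper name is an illustrative assumption):

# Inventory as reported for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
}

def capacity(inv):
    # Effective schedulable capacity per resource class.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}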
[ 1233.399373] env[65758]: WARNING openstack [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1233.399734] env[65758]: WARNING openstack [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1233.408027] env[65758]: WARNING openstack [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1233.408419] env[65758]: WARNING openstack [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1233.473033] env[65758]: DEBUG nova.network.neutron [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1233.644755] env[65758]: WARNING neutronclient.v2_0.client [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1233.645562] env[65758]: WARNING openstack [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1233.645919] env[65758]: WARNING openstack [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1233.698982] env[65758]: WARNING neutronclient.v2_0.client [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1233.699680] env[65758]: WARNING openstack [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1233.700048] env[65758]: WARNING openstack [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1233.844203] env[65758]: DEBUG nova.scheduler.client.report [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1233.860315] env[65758]: DEBUG nova.network.neutron [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Updating instance_info_cache with network_info: [{"id": "c0fe8827-b903-4031-a8be-c5b8a66577af", "address": "fa:16:3e:66:a5:00", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0fe8827-b9", "ovs_interfaceid": "c0fe8827-b903-4031-a8be-c5b8a66577af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1233.941706] env[65758]: DEBUG nova.network.neutron [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updating instance_info_cache with network_info: [{"id": "084d10cd-9734-4baf-91b3-892d54084a42", "address": "fa:16:3e:02:7e:09", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap084d10cd-97", "ovs_interfaceid": "084d10cd-9734-4baf-91b3-892d54084a42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1234.072020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "2bfca515-f4cb-4781-8423-aebf9477a69b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.072298] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "2bfca515-f4cb-4781-8423-aebf9477a69b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.219906] env[65758]: DEBUG 
oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.220341] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.220341] env[65758]: DEBUG nova.network.neutron [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1234.338169] env[65758]: DEBUG nova.compute.manager [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Received event network-changed-c0fe8827-b903-4031-a8be-c5b8a66577af {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1234.338376] env[65758]: DEBUG nova.compute.manager [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Refreshing instance network info cache due to event network-changed-c0fe8827-b903-4031-a8be-c5b8a66577af. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1234.338697] env[65758]: DEBUG oslo_concurrency.lockutils [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Acquiring lock "refresh_cache-cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.349050] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.351340] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.751s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.352902] env[65758]: INFO nova.compute.claims [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1234.363493] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "refresh_cache-cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1234.363861] env[65758]: DEBUG nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Instance network_info: |[{"id": "c0fe8827-b903-4031-a8be-c5b8a66577af", "address": "fa:16:3e:66:a5:00", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0fe8827-b9", "ovs_interfaceid": "c0fe8827-b903-4031-a8be-c5b8a66577af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1234.364217] env[65758]: DEBUG oslo_concurrency.lockutils 
[req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Acquired lock "refresh_cache-cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.364387] env[65758]: DEBUG nova.network.neutron [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Refreshing network info cache for port c0fe8827-b903-4031-a8be-c5b8a66577af {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1234.365587] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:a5:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0fe8827-b903-4031-a8be-c5b8a66577af', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1234.373711] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1234.374240] env[65758]: INFO nova.scheduler.client.report [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Deleted allocations for instance d1918f85-d122-4a84-88b3-f038e8c1149e [ 1234.376110] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1234.376356] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1aaf881c-7ca3-4e28-b4f1-dd188c042812 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.400435] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1234.400435] env[65758]: value = "task-4661445" [ 1234.400435] env[65758]: _type = "Task" [ 1234.400435] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.409644] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661445, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.445531] env[65758]: DEBUG oslo_concurrency.lockutils [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1234.574831] env[65758]: DEBUG nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1234.724429] env[65758]: WARNING neutronclient.v2_0.client [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1234.725342] env[65758]: WARNING openstack [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1234.725811] env[65758]: WARNING openstack [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1234.877027] env[65758]: WARNING neutronclient.v2_0.client [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1234.877541] env[65758]: WARNING openstack [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1234.878366] env[65758]: WARNING openstack [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1234.899645] env[65758]: DEBUG oslo_concurrency.lockutils [None req-21caf0af-4116-479a-b659-ff81941a089f tempest-ServerRescueTestJSONUnderV235-825628614 tempest-ServerRescueTestJSONUnderV235-825628614-project-member] Lock "d1918f85-d122-4a84-88b3-f038e8c1149e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.167s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.913578] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661445, 'name': CreateVM_Task, 'duration_secs': 0.360041} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.913784] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1234.914382] env[65758]: WARNING neutronclient.v2_0.client [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1234.914806] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.914971] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.915408] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1234.916032] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6554c76c-acb9-4b78-a521-b1ce0388e03c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.922678] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1234.922678] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52aa232a-788a-7cb7-809e-3a389ba6127f" [ 1234.922678] env[65758]: _type = "Task" [ 1234.922678] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.935165] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52aa232a-788a-7cb7-809e-3a389ba6127f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.943598] env[65758]: WARNING neutronclient.v2_0.client [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1234.944297] env[65758]: WARNING openstack [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1234.944642] env[65758]: WARNING openstack [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1235.063063] env[65758]: DEBUG nova.network.neutron [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [{"id": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "address": "fa:16:3e:9d:65:b5", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dcfa87-c0", "ovs_interfaceid": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1235.099498] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.172062] env[65758]: WARNING neutronclient.v2_0.client [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1235.172788] env[65758]: WARNING openstack [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1235.173144] env[65758]: WARNING openstack [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1235.258547] env[65758]: DEBUG nova.network.neutron [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Updated VIF entry in instance network info cache for port c0fe8827-b903-4031-a8be-c5b8a66577af. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1235.258940] env[65758]: DEBUG nova.network.neutron [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Updating instance_info_cache with network_info: [{"id": "c0fe8827-b903-4031-a8be-c5b8a66577af", "address": "fa:16:3e:66:a5:00", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0fe8827-b9", "ovs_interfaceid": "c0fe8827-b903-4031-a8be-c5b8a66577af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1235.437663] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52aa232a-788a-7cb7-809e-3a389ba6127f, 'name': SearchDatastore_Task, 'duration_secs': 0.013223} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.438009] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.438256] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1235.438515] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.438624] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.438792] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1235.439080] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b510e47a-2b61-469a-baac-e1b33f541e3a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.450112] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1235.450363] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1235.451161] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fd79017-bf8e-4233-a29d-25722cd2d508 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.457064] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1235.457064] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d853bb5-486d-4802-83ab-a3b8c454f3ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.461435] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1235.461435] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a4946f-38f0-2e89-3598-03478e9c7c6f" [ 1235.461435] env[65758]: _type = "Task" [ 1235.461435] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.467956] env[65758]: DEBUG oslo_vmware.api [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1235.467956] env[65758]: value = "task-4661446" [ 1235.467956] env[65758]: _type = "Task" [ 1235.467956] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.471118] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a4946f-38f0-2e89-3598-03478e9c7c6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.480835] env[65758]: DEBUG oslo_vmware.api [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661446, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.505264] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd44070-3385-4750-b4fb-5df67d3b4630 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.513287] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3659f199-0fc0-400e-bbb6-ab7311ef44a9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.545839] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f02b5d-ac89-4319-86fc-4cc88e47ab7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.554575] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dad7b36-5afa-4734-9b1d-938d364fd0e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.570999] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.573296] env[65758]: DEBUG nova.compute.provider_tree [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1235.603212] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='68204b8220f68f02b7d3188cf3ce5302',container_format='bare',created_at=2025-11-21T13:24:20Z,direct_url=,disk_format='vmdk',id=81bd8cd1-7783-4705-af00-19222b217ece,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1395885144-shelved',owner='e2440f1694fe4b87a9827f6653ff2e4c',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-11-21T13:24:34Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1235.603479] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1235.603638] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 
tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1235.603978] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1235.604303] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1235.604464] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1235.604675] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1235.604828] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1235.604987] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1235.605236] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1235.605421] env[65758]: DEBUG nova.virt.hardware [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1235.606634] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfb55c2-6f65-4236-ae31-718a4f29e924 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.615960] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f3fecb-5cee-4aff-b4c6-b26e7f680a09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.631356] env[65758]: 
DEBUG nova.virt.vmwareapi.vmops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:65:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3dcfa87-c097-4b94-bab6-e9fd7455605b', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1235.639726] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1235.640157] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1235.640407] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35e15939-f99d-43f1-ac61-20b9348696df {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.665478] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1235.665478] env[65758]: value = "task-4661447" [ 1235.665478] env[65758]: _type = "Task" [ 1235.665478] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.675174] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661447, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.761903] env[65758]: DEBUG oslo_concurrency.lockutils [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Releasing lock "refresh_cache-cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.762382] env[65758]: DEBUG nova.compute.manager [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received event network-vif-plugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1235.762382] env[65758]: DEBUG oslo_concurrency.lockutils [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.762583] env[65758]: DEBUG oslo_concurrency.lockutils [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.762745] env[65758]: DEBUG oslo_concurrency.lockutils [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.762918] env[65758]: DEBUG nova.compute.manager [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] No waiting events found dispatching network-vif-plugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1235.763245] env[65758]: WARNING nova.compute.manager [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received unexpected event network-vif-plugged-f3dcfa87-c097-4b94-bab6-e9fd7455605b for instance with vm_state shelved_offloaded and task_state spawning. [ 1235.763443] env[65758]: DEBUG nova.compute.manager [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received event network-changed-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1235.763604] env[65758]: DEBUG nova.compute.manager [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Refreshing instance network info cache due to event network-changed-f3dcfa87-c097-4b94-bab6-e9fd7455605b. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1235.763796] env[65758]: DEBUG oslo_concurrency.lockutils [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Acquiring lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.763928] env[65758]: DEBUG oslo_concurrency.lockutils [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Acquired lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.764151] env[65758]: DEBUG nova.network.neutron [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Refreshing network info cache for port f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1235.973043] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a4946f-38f0-2e89-3598-03478e9c7c6f, 'name': SearchDatastore_Task, 'duration_secs': 0.014087} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.977315] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75582c92-beee-47df-9852-e391222bda0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.986834] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1235.986834] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52c534fb-fa18-42ad-4da5-19fc30820904" [ 1235.986834] env[65758]: _type = "Task" [ 1235.986834] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.987127] env[65758]: DEBUG oslo_vmware.api [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661446, 'name': PowerOnVM_Task, 'duration_secs': 0.452208} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.987496] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1235.987701] env[65758]: DEBUG nova.compute.manager [None req-76fe4785-cf86-43e8-bf83-32860755539d tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1235.991585] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbf8730-7082-4324-ad31-8dc08fd8d41c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.004838] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52c534fb-fa18-42ad-4da5-19fc30820904, 'name': SearchDatastore_Task, 'duration_secs': 0.010705} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.005570] env[65758]: DEBUG oslo_concurrency.lockutils [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.005915] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42/cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1236.006491] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2afd3c6-dba2-47e6-a1f3-8ccfe258d3e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.016584] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1236.016584] env[65758]: value = "task-4661448" [ 1236.016584] env[65758]: _type = "Task" [ 1236.016584] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.028739] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661448, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.077144] env[65758]: DEBUG nova.scheduler.client.report [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1236.180210] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661447, 'name': CreateVM_Task, 'duration_secs': 0.478592} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.180972] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1236.181453] env[65758]: WARNING neutronclient.v2_0.client [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1236.182380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.182380] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.182899] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1236.183312] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-432e6b2c-0e86-4d0b-b7e4-6c4f1f0ed031 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.193282] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1236.193282] env[65758]: value = 
"session[52f282ba-8d16-d852-9890-43f0b19795c3]526fefa7-cab6-b131-1c9b-b445bd02439c" [ 1236.193282] env[65758]: _type = "Task" [ 1236.193282] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.209607] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]526fefa7-cab6-b131-1c9b-b445bd02439c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.267996] env[65758]: WARNING neutronclient.v2_0.client [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1236.268804] env[65758]: WARNING openstack [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1236.269256] env[65758]: WARNING openstack [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1236.490401] env[65758]: WARNING neutronclient.v2_0.client [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1236.491073] env[65758]: WARNING openstack [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1236.491542] env[65758]: WARNING openstack [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1236.530462] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661448, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.583904] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.232s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.584836] env[65758]: DEBUG nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1236.588852] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.489s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.590430] env[65758]: INFO nova.compute.claims [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1236.595620] env[65758]: DEBUG nova.network.neutron [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updated VIF entry in instance network info cache for port f3dcfa87-c097-4b94-bab6-e9fd7455605b. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1236.595968] env[65758]: DEBUG nova.network.neutron [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [{"id": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "address": "fa:16:3e:9d:65:b5", "network": {"id": "4691593e-aee4-4ddb-ba73-023f799b1d6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1087435093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2440f1694fe4b87a9827f6653ff2e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dcfa87-c0", "ovs_interfaceid": "f3dcfa87-c097-4b94-bab6-e9fd7455605b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1236.704698] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.704912] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Processing image 81bd8cd1-7783-4705-af00-19222b217ece {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1236.705212] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece/81bd8cd1-7783-4705-af00-19222b217ece.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.705413] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece/81bd8cd1-7783-4705-af00-19222b217ece.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.705664] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 
tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.706928] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99c4ab9a-e67f-40d0-bad9-4fed0266d305 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.716385] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.717010] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1236.717603] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7238991e-75de-4dce-aa67-e3c9a8979f27 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.724177] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1236.724177] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520267a7-bf4b-a576-019b-a044550719b7" [ 1236.724177] env[65758]: _type = "Task" [ 1236.724177] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.733276] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520267a7-bf4b-a576-019b-a044550719b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.029481] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564839} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.029481] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42/cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1237.029897] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1237.029941] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4474662-f577-41c6-8295-87efe2ebc6ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.040052] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1237.040052] env[65758]: value = "task-4661449" [ 1237.040052] env[65758]: _type = "Task" [ 1237.040052] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.049531] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661449, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.095615] env[65758]: DEBUG nova.compute.utils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1237.097341] env[65758]: DEBUG nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1237.097548] env[65758]: DEBUG nova.network.neutron [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1237.097873] env[65758]: WARNING neutronclient.v2_0.client [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1237.098195] env[65758]: WARNING neutronclient.v2_0.client [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1237.098788] env[65758]: WARNING openstack [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1237.099169] env[65758]: WARNING openstack [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1237.109076] env[65758]: DEBUG oslo_concurrency.lockutils [req-82083287-0f63-4924-b918-1ccba7a620f6 req-b47b0ef4-3ffc-47bd-9de0-d91b4e2baadd service nova] Releasing lock "refresh_cache-bc10286b-195f-48a2-b16c-f8f925ec7a2a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.159014] env[65758]: DEBUG nova.policy [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91358f51732f44198a020f6669168408', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4095654557a34bb0907071aedb3bb678', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1237.236346] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Preparing fetch location {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1237.236560] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Fetch image to [datastore1] OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643/OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643.vmdk {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1237.236955] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Downloading stream optimized image 
81bd8cd1-7783-4705-af00-19222b217ece to [datastore1] OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643/OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643.vmdk on the data store datastore1 as vApp {{(pid=65758) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1237.237272] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Downloading image file data 81bd8cd1-7783-4705-af00-19222b217ece to the ESX as VM named 'OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643' {{(pid=65758) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1237.338517] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1237.338517] env[65758]: value = "resgroup-9" [ 1237.338517] env[65758]: _type = "ResourcePool" [ 1237.338517] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1237.340753] env[65758]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e6b4c781-6004-4a29-b427-20d4d136a28e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.369471] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lease: (returnval){ [ 1237.369471] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52447f57-17f0-d98a-7d47-62b873ea191b" [ 1237.369471] env[65758]: _type = "HttpNfcLease" [ 1237.369471] env[65758]: } obtained for vApp import into resource pool (val){ [ 1237.369471] env[65758]: value = "resgroup-9" [ 1237.369471] env[65758]: _type = "ResourcePool" [ 1237.369471] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1237.369727] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the lease: (returnval){ [ 1237.369727] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52447f57-17f0-d98a-7d47-62b873ea191b" [ 1237.369727] env[65758]: _type = "HttpNfcLease" [ 1237.369727] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1237.379059] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1237.379059] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52447f57-17f0-d98a-7d47-62b873ea191b" [ 1237.379059] env[65758]: _type = "HttpNfcLease" [ 1237.379059] env[65758]: } is initializing. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1237.505673] env[65758]: DEBUG nova.network.neutron [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Successfully created port: 61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1237.550255] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.209072} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.550541] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1237.551447] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426bb253-1ed7-4f98-bc79-054e167ebb93 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.580259] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42/cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1237.580259] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c38c0406-e370-43b6-9654-c9b3b8734f22 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.606032] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1237.606032] env[65758]: value = "task-4661451" [ 1237.606032] env[65758]: _type = "Task" [ 1237.606032] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.606866] env[65758]: DEBUG nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1237.625443] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661451, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.766284] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e3204d-b97f-459b-afe5-9a42bc4b3940 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.782040] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7222ffa4-fae7-43bd-a6d2-584de21072ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.826227] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1d90c1-3c68-48be-b595-513877636710 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.836303] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca8957c-5448-4065-923f-780100e30195 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.858662] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "45c9678b-0478-4192-8684-3b6fb0f4831e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.858986] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "45c9678b-0478-4192-8684-3b6fb0f4831e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.860622] env[65758]: DEBUG nova.compute.provider_tree [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.881102] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1237.881102] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52447f57-17f0-d98a-7d47-62b873ea191b" [ 1237.881102] env[65758]: _type = "HttpNfcLease" [ 1237.881102] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1238.105027] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3d8047-ee5c-43df-97cf-f308fd14bc58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.121349] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661451, 'name': ReconfigVM_Task, 'duration_secs': 0.435126} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.123871] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Reconfigured VM instance instance-00000074 to attach disk [datastore1] cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42/cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1238.124561] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e24a2140-2f9b-47c9-b9f3-d6fd5fba180c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Suspending the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1238.124806] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45da6f7c-f5d8-42fc-9f1c-67e97786effc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.126740] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9b790025-924e-47f7-961f-571b54f60043 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.134328] env[65758]: DEBUG oslo_vmware.api [None req-e24a2140-2f9b-47c9-b9f3-d6fd5fba180c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1238.134328] env[65758]: value = "task-4661453" [ 1238.134328] env[65758]: _type = "Task" [ 1238.134328] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.135451] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1238.135451] env[65758]: value = "task-4661452" [ 1238.135451] env[65758]: _type = "Task" [ 1238.135451] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.151894] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661452, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.152194] env[65758]: DEBUG oslo_vmware.api [None req-e24a2140-2f9b-47c9-b9f3-d6fd5fba180c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661453, 'name': SuspendVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.363249] env[65758]: DEBUG nova.compute.manager [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1238.367138] env[65758]: DEBUG nova.scheduler.client.report [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.378878] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1238.378878] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52447f57-17f0-d98a-7d47-62b873ea191b" [ 1238.378878] env[65758]: _type = "HttpNfcLease" [ 1238.378878] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1238.623457] env[65758]: DEBUG nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1238.655388] env[65758]: DEBUG oslo_vmware.api [None req-e24a2140-2f9b-47c9-b9f3-d6fd5fba180c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661453, 'name': SuspendVM_Task} progress is 58%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.655672] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661452, 'name': Rename_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.658162] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1238.658430] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1238.658581] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1238.658761] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1238.658917] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1238.659101] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1238.659320] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1238.659478] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1238.659638] 
env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1238.659795] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1238.659961] env[65758]: DEBUG nova.virt.hardware [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1238.660804] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460e9a34-0113-4d98-8648-ed20917d5d1c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.669894] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e88f1e3-a766-45d4-877b-e346d703bc88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.874927] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.286s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.875534] env[65758]: DEBUG nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1238.889300] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1238.889300] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52447f57-17f0-d98a-7d47-62b873ea191b" [ 1238.889300] env[65758]: _type = "HttpNfcLease" [ 1238.889300] env[65758]: } is initializing. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1238.900744] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.901062] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.902710] env[65758]: INFO nova.compute.claims [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1239.008888] env[65758]: DEBUG nova.compute.manager [req-721728b4-e564-4c99-ba2d-cb214bf53daa req-3194b16c-c4cf-4422-8bbd-e670cc4df560 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received event network-vif-plugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1239.009230] env[65758]: DEBUG oslo_concurrency.lockutils [req-721728b4-e564-4c99-ba2d-cb214bf53daa req-3194b16c-c4cf-4422-8bbd-e670cc4df560 service nova] Acquiring lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1239.010159] env[65758]: DEBUG oslo_concurrency.lockutils [req-721728b4-e564-4c99-ba2d-cb214bf53daa req-3194b16c-c4cf-4422-8bbd-e670cc4df560 service nova] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.010159] env[65758]: DEBUG oslo_concurrency.lockutils [req-721728b4-e564-4c99-ba2d-cb214bf53daa req-3194b16c-c4cf-4422-8bbd-e670cc4df560 service nova] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.010159] env[65758]: DEBUG nova.compute.manager [req-721728b4-e564-4c99-ba2d-cb214bf53daa req-3194b16c-c4cf-4422-8bbd-e670cc4df560 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] No waiting events found dispatching network-vif-plugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1239.010159] env[65758]: WARNING nova.compute.manager [req-721728b4-e564-4c99-ba2d-cb214bf53daa req-3194b16c-c4cf-4422-8bbd-e670cc4df560 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received unexpected event network-vif-plugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 for instance with vm_state building and task_state spawning. 
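The lock bookkeeping in the entries above ("Acquiring lock ... by ...", "Lock ... acquired ... waited 0.000s", "... 'released' ... held 0.000s") is oslo.concurrency's lockutils wrapping a critical section, here the resource tracker's "compute_resources" claim and the per-instance event queue. A minimal sketch of the same primitive, using an illustrative lock name and body rather than Nova's actual code:

from oslo_concurrency import lockutils

# Decorator form: concurrent callers serialize on the named lock, and lockutils
# logs the acquire/held/release timings seen above. The lock name and function
# are illustrative placeholders.
@lockutils.synchronized('example-instance-uuid-events')
def pop_event():
    pass  # critical section: mutate the per-instance event list

# Context-manager form of the same lock primitive.
with lockutils.lock('compute_resources'):
    pass  # e.g. claim resources for an instance being built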
[ 1239.109841] env[65758]: DEBUG nova.network.neutron [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Successfully updated port: 61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1239.152124] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661452, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.155902] env[65758]: DEBUG oslo_vmware.api [None req-e24a2140-2f9b-47c9-b9f3-d6fd5fba180c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661453, 'name': SuspendVM_Task, 'duration_secs': 0.709369} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.156249] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e24a2140-2f9b-47c9-b9f3-d6fd5fba180c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Suspended the VM {{(pid=65758) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1239.156479] env[65758]: DEBUG nova.compute.manager [None req-e24a2140-2f9b-47c9-b9f3-d6fd5fba180c tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1239.157391] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817b0a84-795e-487d-8e5e-ad1aea65d752 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.383077] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1239.383077] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52447f57-17f0-d98a-7d47-62b873ea191b" [ 1239.383077] env[65758]: _type = "HttpNfcLease" [ 1239.383077] env[65758]: } is ready. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1239.383461] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1239.383461] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52447f57-17f0-d98a-7d47-62b873ea191b" [ 1239.383461] env[65758]: _type = "HttpNfcLease" [ 1239.383461] env[65758]: }. 
{{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1239.384427] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731b3d85-2dfb-477d-8cd8-3c35bfbe4532 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.389028] env[65758]: DEBUG nova.compute.utils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1239.389730] env[65758]: DEBUG nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1239.389964] env[65758]: DEBUG nova.network.neutron [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1239.390292] env[65758]: WARNING neutronclient.v2_0.client [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1239.390596] env[65758]: WARNING neutronclient.v2_0.client [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1239.391188] env[65758]: WARNING openstack [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1239.391536] env[65758]: WARNING openstack [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1239.400363] env[65758]: DEBUG nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1239.408398] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523f083e-25f1-9a24-379e-c951a50a5606/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1239.408533] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523f083e-25f1-9a24-379e-c951a50a5606/disk-0.vmdk. {{(pid=65758) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1239.468935] env[65758]: DEBUG nova.policy [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec1f0d5eb8304e50b64a102ee8b01a8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a8729d781b1450e9b366785f96f9938', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1239.478373] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cef94bd4-2b9e-4c5f-be2d-5d276c418382 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.612271] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.612391] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.612578] env[65758]: DEBUG nova.network.neutron [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1239.653465] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661452, 'name': Rename_Task, 'duration_secs': 1.453348} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.653931] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1239.654181] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c84961e4-54d3-4075-9b4f-f6f33a00a9cd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.661807] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1239.661807] env[65758]: value = "task-4661454" [ 1239.661807] env[65758]: _type = "Task" [ 1239.661807] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.673413] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661454, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.795471] env[65758]: DEBUG nova.network.neutron [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Successfully created port: f268cd74-fb17-4936-92b2-939e07f2fdfa {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1240.101368] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814d310e-dac5-4dcb-8c98-717ece5c3889 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.112357] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1def1a8-0436-49e1-a50a-b99356bfea5a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.116838] env[65758]: WARNING openstack [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1240.117359] env[65758]: WARNING openstack [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1240.163667] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409639d2-8c70-420a-8c86-d2e3930cf71a 
{{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.176612] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661454, 'name': PowerOnVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.182408] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237625b2-bfd0-4244-ad51-83f1637c05eb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.200813] env[65758]: DEBUG nova.compute.provider_tree [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.209123] env[65758]: DEBUG nova.network.neutron [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1240.342325] env[65758]: WARNING neutronclient.v2_0.client [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1240.343249] env[65758]: WARNING openstack [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1240.346167] env[65758]: WARNING openstack [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1240.416937] env[65758]: DEBUG nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1240.449475] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1240.449803] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1240.449951] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1240.450272] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1240.450490] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1240.450576] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1240.450787] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1240.450977] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1240.451193] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 
tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1240.451373] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1240.451595] env[65758]: DEBUG nova.virt.hardware [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1240.453289] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d915735-eaad-4765-99bc-2b5b27b6ef8c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.466643] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba87f5a-2be5-42ba-837b-c23b135e6405 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.494651] env[65758]: DEBUG nova.network.neutron [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [{"id": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "address": "fa:16:3e:45:e5:9e", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61227a3e-82", "ovs_interfaceid": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1240.678072] env[65758]: DEBUG oslo_vmware.api [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661454, 'name': PowerOnVM_Task, 'duration_secs': 0.612553} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.678421] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1240.678742] env[65758]: INFO nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Took 8.77 seconds to spawn the instance on the hypervisor. [ 1240.678979] env[65758]: DEBUG nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1240.679878] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22681da6-441b-4387-a17b-5cc31b760b0e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.706862] env[65758]: DEBUG nova.scheduler.client.report [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1240.813382] env[65758]: INFO nova.compute.manager [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Resuming [ 1240.814388] env[65758]: DEBUG nova.objects.instance [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'flavor' on Instance uuid 1f773924-74ee-4151-81ba-d105ce225289 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1240.933318] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Completed reading data from the image iterator. {{(pid=65758) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1240.933563] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523f083e-25f1-9a24-379e-c951a50a5606/disk-0.vmdk. 
{{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1240.934485] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33018af6-635b-4d89-a282-fd5f4069dc6d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.941743] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523f083e-25f1-9a24-379e-c951a50a5606/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1240.941904] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523f083e-25f1-9a24-379e-c951a50a5606/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1240.942180] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-ff12e03a-dfd8-4a68-8fc7-d2f51344fb20 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.997986] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.998382] env[65758]: DEBUG nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Instance network_info: |[{"id": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "address": "fa:16:3e:45:e5:9e", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61227a3e-82", "ovs_interfaceid": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1240.998870] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None 
req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:e5:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '438671d0-9468-4e44-84c1-4c0ebaa743e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61227a3e-82c2-4ebf-b71b-b953b5667f90', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1241.006909] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1241.007173] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1241.007447] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed7af032-afc3-4094-b1e1-51bb39cf2d1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.027799] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1241.027799] env[65758]: value = "task-4661455" [ 1241.027799] env[65758]: _type = "Task" [ 1241.027799] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.037504] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661455, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.041102] env[65758]: DEBUG nova.compute.manager [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received event network-changed-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1241.041301] env[65758]: DEBUG nova.compute.manager [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Refreshing instance network info cache due to event network-changed-61227a3e-82c2-4ebf-b71b-b953b5667f90. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1241.041512] env[65758]: DEBUG oslo_concurrency.lockutils [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] Acquiring lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.041648] env[65758]: DEBUG oslo_concurrency.lockutils [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] Acquired lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.041823] env[65758]: DEBUG nova.network.neutron [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Refreshing network info cache for port 61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1241.202838] env[65758]: INFO nova.compute.manager [None req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Took 15.62 seconds to build instance. [ 1241.214564] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.215920] env[65758]: DEBUG nova.compute.manager [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1241.398473] env[65758]: DEBUG oslo_vmware.rw_handles [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523f083e-25f1-9a24-379e-c951a50a5606/disk-0.vmdk. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1241.398697] env[65758]: INFO nova.virt.vmwareapi.images [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Downloaded image file data 81bd8cd1-7783-4705-af00-19222b217ece [ 1241.400024] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1954e4-0e7b-4802-8005-2c3557957880 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.418288] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a93b1aac-4384-4f29-afc3-ca9000cdda77 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.434033] env[65758]: DEBUG nova.network.neutron [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Successfully updated port: f268cd74-fb17-4936-92b2-939e07f2fdfa {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1241.455411] env[65758]: INFO nova.virt.vmwareapi.images [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] The imported VM was unregistered [ 1241.459166] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Caching image {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1241.459166] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Creating directory with path [datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1241.459817] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49adead7-f98b-4270-b18d-a533bf46da3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.473275] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Created directory with path [datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1241.473487] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643/OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643.vmdk to [datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece/81bd8cd1-7783-4705-af00-19222b217ece.vmdk. 
{{(pid=65758) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1241.473838] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ac1daab8-5b72-47b2-811a-4f64c7c4c11a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.482142] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1241.482142] env[65758]: value = "task-4661457" [ 1241.482142] env[65758]: _type = "Task" [ 1241.482142] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.491867] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661457, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.540014] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661455, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.546855] env[65758]: WARNING neutronclient.v2_0.client [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1241.547674] env[65758]: WARNING openstack [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1241.548089] env[65758]: WARNING openstack [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1241.701173] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "a9550f72-009c-4143-afe2-887727e5c071" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.701839] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.705439] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-90188b09-2306-42ba-983f-2ed31a70c8b0 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.129s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.710976] env[65758]: WARNING neutronclient.v2_0.client [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1241.711735] env[65758]: WARNING openstack [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1241.712117] env[65758]: WARNING openstack [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1241.722378] env[65758]: DEBUG nova.compute.utils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1241.724408] env[65758]: DEBUG nova.compute.manager [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Not allocating networking since 'none' was specified. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 1241.804682] env[65758]: DEBUG nova.network.neutron [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updated VIF entry in instance network info cache for port 61227a3e-82c2-4ebf-b71b-b953b5667f90. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1241.804800] env[65758]: DEBUG nova.network.neutron [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [{"id": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "address": "fa:16:3e:45:e5:9e", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61227a3e-82", "ovs_interfaceid": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1241.936786] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.936973] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.937116] env[65758]: DEBUG nova.network.neutron [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1241.994097] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661457, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.040645] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661455, 'name': CreateVM_Task, 'duration_secs': 0.582125} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.040863] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1242.041514] env[65758]: WARNING neutronclient.v2_0.client [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1242.041943] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.042109] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.042497] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1242.042795] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1584c283-2d21-4b79-98b2-205cf435b8f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.049647] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1242.049647] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5257c841-a65a-23c9-ee98-772da66458a7" [ 1242.049647] env[65758]: _type = "Task" [ 1242.049647] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.059984] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5257c841-a65a-23c9-ee98-772da66458a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.205323] env[65758]: DEBUG nova.compute.utils [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1242.225659] env[65758]: DEBUG nova.compute.manager [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1242.308269] env[65758]: DEBUG oslo_concurrency.lockutils [req-b8af1b07-1024-450f-ae86-4b362d248beb req-d27e4a3f-20ed-4ec7-aafa-e1c427289ddc service nova] Releasing lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.326358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.326603] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquired lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.326790] env[65758]: DEBUG nova.network.neutron [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1242.442618] env[65758]: WARNING openstack [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1242.443129] env[65758]: WARNING openstack [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1242.476180] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "c97f02fc-a244-40e9-97b3-8cbbf516607a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.476416] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "c97f02fc-a244-40e9-97b3-8cbbf516607a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.491150] env[65758]: DEBUG nova.network.neutron [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1242.499946] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661457, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.561348] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5257c841-a65a-23c9-ee98-772da66458a7, 'name': SearchDatastore_Task, 'duration_secs': 0.053616} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.562651] env[65758]: WARNING neutronclient.v2_0.client [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1242.563309] env[65758]: WARNING openstack [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1242.563653] env[65758]: WARNING openstack [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1242.571521] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.571771] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1242.572020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.572185] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.572366] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1242.573227] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dff06be-9e21-4511-b19f-27669f183003 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.586528] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1242.587080] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1242.588123] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80bb7040-635f-4228-91c5-08268b8b7d10 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.595084] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1242.595084] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e5f0b8-01fe-8c59-95bb-3a086fbe38bc" [ 1242.595084] env[65758]: _type = "Task" [ 1242.595084] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.604706] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e5f0b8-01fe-8c59-95bb-3a086fbe38bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.670976] env[65758]: DEBUG nova.network.neutron [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Updating instance_info_cache with network_info: [{"id": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "address": "fa:16:3e:71:5a:a4", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf268cd74-fb", "ovs_interfaceid": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1242.708968] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s 
{{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.830278] env[65758]: WARNING neutronclient.v2_0.client [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1242.831170] env[65758]: WARNING openstack [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1242.831702] env[65758]: WARNING openstack [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1242.978616] env[65758]: DEBUG nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1242.995941] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661457, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.040663] env[65758]: WARNING neutronclient.v2_0.client [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1243.041369] env[65758]: WARNING openstack [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1243.041730] env[65758]: WARNING openstack [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1243.081909] env[65758]: DEBUG nova.compute.manager [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Received event network-vif-plugged-f268cd74-fb17-4936-92b2-939e07f2fdfa {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1243.082403] env[65758]: DEBUG oslo_concurrency.lockutils [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Acquiring lock "2bfca515-f4cb-4781-8423-aebf9477a69b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.082612] env[65758]: DEBUG oslo_concurrency.lockutils [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Lock "2bfca515-f4cb-4781-8423-aebf9477a69b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.082785] env[65758]: DEBUG oslo_concurrency.lockutils [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Lock "2bfca515-f4cb-4781-8423-aebf9477a69b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.083013] env[65758]: DEBUG nova.compute.manager [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] No waiting events found dispatching network-vif-plugged-f268cd74-fb17-4936-92b2-939e07f2fdfa {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1243.083229] env[65758]: WARNING nova.compute.manager [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Received unexpected event network-vif-plugged-f268cd74-fb17-4936-92b2-939e07f2fdfa for instance with vm_state building and task_state spawning. 
[ 1243.083404] env[65758]: DEBUG nova.compute.manager [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Received event network-changed-f268cd74-fb17-4936-92b2-939e07f2fdfa {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1243.083619] env[65758]: DEBUG nova.compute.manager [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Refreshing instance network info cache due to event network-changed-f268cd74-fb17-4936-92b2-939e07f2fdfa. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1243.083814] env[65758]: DEBUG oslo_concurrency.lockutils [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Acquiring lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.110208] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e5f0b8-01fe-8c59-95bb-3a086fbe38bc, 'name': SearchDatastore_Task, 'duration_secs': 0.050011} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.111151] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beabe13c-6d18-48a4-b713-0feccd53f045 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.118245] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1243.118245] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527385cb-3a99-4853-31b6-fd2890386e3b" [ 1243.118245] env[65758]: _type = "Task" [ 1243.118245] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.128522] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527385cb-3a99-4853-31b6-fd2890386e3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.143207] env[65758]: DEBUG nova.network.neutron [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updating instance_info_cache with network_info: [{"id": "084d10cd-9734-4baf-91b3-892d54084a42", "address": "fa:16:3e:02:7e:09", "network": {"id": "8fa470bd-5fff-4b6d-a10b-eec090c62e98", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-409468481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4c2ab2b80c04c38bfb4c7cafac87fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap084d10cd-97", "ovs_interfaceid": "084d10cd-9734-4baf-91b3-892d54084a42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1243.174172] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.174665] env[65758]: DEBUG nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Instance network_info: |[{"id": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "address": "fa:16:3e:71:5a:a4", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf268cd74-fb", "ovs_interfaceid": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1243.175015] env[65758]: DEBUG oslo_concurrency.lockutils [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Acquired lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.175302] env[65758]: DEBUG nova.network.neutron [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Refreshing network info cache for port f268cd74-fb17-4936-92b2-939e07f2fdfa {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1243.176857] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:5a:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd22cb4ec-277f-41ee-8aba-b3d54442b93d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f268cd74-fb17-4936-92b2-939e07f2fdfa', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1243.186578] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1243.187922] env[65758]: WARNING neutronclient.v2_0.client [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1243.188637] env[65758]: WARNING openstack [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1243.189249] env[65758]: WARNING openstack [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1243.197409] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1243.198475] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41d2a4b7-6941-41f3-adef-aa3b0017db91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.222643] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1243.222643] env[65758]: value = "task-4661458" [ 1243.222643] env[65758]: _type = "Task" [ 1243.222643] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.232995] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661458, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.237572] env[65758]: DEBUG nova.compute.manager [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1243.270651] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1243.271080] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1243.271414] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1243.271728] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1243.271969] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1243.272165] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1243.272437] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1243.272621] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1243.272819] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 
tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1243.273019] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1243.273250] env[65758]: DEBUG nova.virt.hardware [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1243.274730] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0afa35-2fbb-4940-bd8d-425574dca46d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.294984] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fca636-7750-4697-b4de-b92a66f9f32d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.313904] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1243.321133] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Creating folder: Project (e65a59f8a24c480abf2ec74bcd486000). Parent ref: group-v909763. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1243.325785] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eaf94022-0703-4b91-a489-9755a842731d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.342214] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Created folder: Project (e65a59f8a24c480abf2ec74bcd486000) in parent group-v909763. [ 1243.342468] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Creating folder: Instances. Parent ref: group-v910089. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1243.342843] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aaa087ec-9b72-46be-a52f-8598f786ed9e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.357501] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Created folder: Instances in parent group-v910089. 
[ 1243.357708] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1243.360940] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1243.361510] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3d34112-090f-416a-94aa-29e5989f2b3e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.378649] env[65758]: WARNING neutronclient.v2_0.client [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1243.379362] env[65758]: WARNING openstack [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1243.379730] env[65758]: WARNING openstack [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1243.391067] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1243.391067] env[65758]: value = "task-4661461" [ 1243.391067] env[65758]: _type = "Task" [ 1243.391067] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.401209] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661461, 'name': CreateVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.478027] env[65758]: DEBUG nova.network.neutron [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Updated VIF entry in instance network info cache for port f268cd74-fb17-4936-92b2-939e07f2fdfa. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1243.478027] env[65758]: DEBUG nova.network.neutron [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Updating instance_info_cache with network_info: [{"id": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "address": "fa:16:3e:71:5a:a4", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf268cd74-fb", "ovs_interfaceid": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1243.499977] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661457, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.511212] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.511540] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.513261] env[65758]: INFO nova.compute.claims [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1243.631065] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527385cb-3a99-4853-31b6-fd2890386e3b, 'name': SearchDatastore_Task, 'duration_secs': 0.044806} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.631384] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.631647] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742/e5b042e0-3dba-4bfe-9e4d-1d55bcb72742.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1243.631922] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f5cfb84-9c1d-4da7-80a2-3110c5d32418 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.641490] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1243.641490] env[65758]: value = "task-4661462" [ 1243.641490] env[65758]: _type = "Task" [ 1243.641490] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.645985] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Releasing lock "refresh_cache-1f773924-74ee-4151-81ba-d105ce225289" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.646960] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0284904b-0bcb-43c4-abd0-e0d4b5537c55 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.652966] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661462, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.659951] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Resuming the VM {{(pid=65758) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1243.660325] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d592781b-96e4-490d-9a39-c82cd552c467 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.671783] env[65758]: DEBUG oslo_vmware.api [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1243.671783] env[65758]: value = "task-4661463" [ 1243.671783] env[65758]: _type = "Task" [ 1243.671783] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.683896] env[65758]: DEBUG oslo_vmware.api [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661463, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.734441] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661458, 'name': CreateVM_Task, 'duration_secs': 0.42536} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.734615] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1243.735171] env[65758]: WARNING neutronclient.v2_0.client [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1243.735566] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.735812] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.736066] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1243.736396] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0a537fc-4632-48b3-9b21-64c913f110f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.743685] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1243.743685] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52774b64-64f2-b72d-8fc8-cd012e8ea8d8" [ 1243.743685] env[65758]: _type = "Task" [ 1243.743685] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.755133] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52774b64-64f2-b72d-8fc8-cd012e8ea8d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.795238] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "a9550f72-009c-4143-afe2-887727e5c071" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.795634] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.795938] env[65758]: INFO nova.compute.manager [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Attaching volume d86bceca-2f54-4f80-89ad-662fb3a8104a to /dev/sdb [ 1243.838574] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3e880a-e6b3-4634-8642-ce98b6dc0ba5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.851905] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a93e6d-d652-49ba-8c4f-b4aa0e517542 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.868458] env[65758]: DEBUG nova.virt.block_device [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Updating existing volume attachment record: 0f56013a-6b08-4961-86e4-3e578d65df55 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1243.904462] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661461, 'name': CreateVM_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.981641] env[65758]: DEBUG oslo_concurrency.lockutils [req-368194b2-8709-4009-ba2f-5234330ab745 req-71cac433-50af-44cb-9510-b87c00e00775 service nova] Releasing lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.995759] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661457, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.153163] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661462, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.183475] env[65758]: DEBUG oslo_vmware.api [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661463, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.260189] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52774b64-64f2-b72d-8fc8-cd012e8ea8d8, 'name': SearchDatastore_Task, 'duration_secs': 0.060201} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.260835] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.260835] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1244.261031] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.261188] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.261399] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1244.261718] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92cd48d7-9555-4180-8ea7-aad721436b4e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.279957] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1244.280215] env[65758]: DEBUG 
nova.virt.vmwareapi.vmops [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1244.281104] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aef2957-a037-437e-a07e-a6149f27c528 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.290143] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1244.290143] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5271067a-6bd4-ff8c-6b22-61bd5a20646c" [ 1244.290143] env[65758]: _type = "Task" [ 1244.290143] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.303156] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5271067a-6bd4-ff8c-6b22-61bd5a20646c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.404328] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661461, 'name': CreateVM_Task, 'duration_secs': 0.738108} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.404548] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1244.405019] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.405302] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.405752] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1244.406112] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ab7d8e-0d18-49a8-983a-b3962a60ba2a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.414667] env[65758]: DEBUG oslo_vmware.api [None 
req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1244.414667] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5203a861-ba01-138d-d214-ad9b9ada7934" [ 1244.414667] env[65758]: _type = "Task" [ 1244.414667] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.426132] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5203a861-ba01-138d-d214-ad9b9ada7934, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.496138] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661457, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.672699} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.496507] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643/OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643.vmdk to [datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece/81bd8cd1-7783-4705-af00-19222b217ece.vmdk. [ 1244.496700] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Cleaning up location [datastore1] OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643 {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1244.496860] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_6948d21c-b4f6-4121-89a0-10e26fc0e643 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.497141] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a05e731e-34fb-4df1-9bae-c87afd850184 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.504427] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1244.504427] env[65758]: value = "task-4661467" [ 1244.504427] env[65758]: _type = "Task" [ 1244.504427] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.513010] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.654498] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661462, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.826071} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.654768] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742/e5b042e0-3dba-4bfe-9e4d-1d55bcb72742.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1244.654977] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1244.655311] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bd13bab-9c46-42ff-aba4-f2cecb9e1373 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.658972] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0144cdd0-5b9e-4995-a541-c071b93b6689 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.664031] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1244.664031] env[65758]: value = "task-4661468" [ 1244.664031] env[65758]: _type = "Task" [ 1244.664031] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.669825] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfcf882f-ab1d-402b-983e-464d92f54339 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.679493] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661468, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.706423] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03234979-d520-45e4-89ae-0f0bec1ad38e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.712294] env[65758]: DEBUG oslo_vmware.api [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661463, 'name': PowerOnVM_Task, 'duration_secs': 0.598422} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.713018] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Resumed the VM {{(pid=65758) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1244.713337] env[65758]: DEBUG nova.compute.manager [None req-5ef09b95-847c-4c1d-b623-d213a0c7cb1b tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1244.714544] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b564e8a-d5ff-4775-b147-1f54617470c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.721220] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f3d83c-dbf5-4c08-80a2-260f594adcc1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.740479] env[65758]: DEBUG nova.compute.provider_tree [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.801206] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5271067a-6bd4-ff8c-6b22-61bd5a20646c, 'name': SearchDatastore_Task, 'duration_secs': 0.054041} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.802125] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6df56aab-e595-4635-bd79-b3bbc96286b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.809239] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1244.809239] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5250572a-2330-fb74-99d6-e11f0ad4da34" [ 1244.809239] env[65758]: _type = "Task" [ 1244.809239] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.818267] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5250572a-2330-fb74-99d6-e11f0ad4da34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.934799] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5203a861-ba01-138d-d214-ad9b9ada7934, 'name': SearchDatastore_Task, 'duration_secs': 0.033165} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.935258] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.936400] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1244.936665] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.014702] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076794} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.014993] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1245.015204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece/81bd8cd1-7783-4705-af00-19222b217ece.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.015555] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece/81bd8cd1-7783-4705-af00-19222b217ece.vmdk to [datastore1] bc10286b-195f-48a2-b16c-f8f925ec7a2a/bc10286b-195f-48a2-b16c-f8f925ec7a2a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1245.015858] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b1ec79f-caa6-4de8-8968-426586aade41 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.025586] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1245.025586] env[65758]: value = "task-4661469" [ 1245.025586] env[65758]: _type = "Task" [ 1245.025586] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.036358] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.179073] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661468, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073274} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.179388] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1245.180309] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e380b57-3e86-4c64-8d5d-a5eeb9015123 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.209079] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742/e5b042e0-3dba-4bfe-9e4d-1d55bcb72742.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1245.209332] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e43aab82-6cd4-41ea-a886-d750ce713805 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.236223] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1245.236223] env[65758]: value = "task-4661470" [ 1245.236223] env[65758]: _type = "Task" [ 1245.236223] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.247279] env[65758]: DEBUG nova.scheduler.client.report [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1245.251203] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661470, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.328907] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5250572a-2330-fb74-99d6-e11f0ad4da34, 'name': SearchDatastore_Task, 'duration_secs': 0.011143} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.329799] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.330188] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b/2bfca515-f4cb-4781-8423-aebf9477a69b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1245.330553] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.330820] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1245.331108] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2a9fbce-a187-4f2a-9ba8-cbff65944a29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.333978] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46566b3e-c6f5-4729-b58e-1b2e5eaf12f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.344562] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1245.344562] env[65758]: value = "task-4661471" [ 1245.344562] env[65758]: _type = "Task" [ 1245.344562] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.349640] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1245.349884] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1245.351237] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43fe0610-a9a8-4eaf-a420-c5b58b38cbc6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.358893] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661471, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.360644] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1245.360644] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d68a51-c1e9-be8e-ea44-53e91d066026" [ 1245.360644] env[65758]: _type = "Task" [ 1245.360644] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.370673] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d68a51-c1e9-be8e-ea44-53e91d066026, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.538424] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661469, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.660449] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "1f773924-74ee-4151-81ba-d105ce225289" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.660869] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "1f773924-74ee-4151-81ba-d105ce225289" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.661215] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "1f773924-74ee-4151-81ba-d105ce225289-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.661467] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "1f773924-74ee-4151-81ba-d105ce225289-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.661700] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "1f773924-74ee-4151-81ba-d105ce225289-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.664476] env[65758]: INFO nova.compute.manager [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Terminating instance [ 1245.748763] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661470, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.752865] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.241s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.753471] env[65758]: DEBUG nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1245.859721] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661471, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.875563] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d68a51-c1e9-be8e-ea44-53e91d066026, 'name': SearchDatastore_Task, 'duration_secs': 0.031515} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.876512] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ea85542-9323-49db-9fe5-b5eed411a9e6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.883922] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1245.883922] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5292c5f3-fcb0-9e07-b74d-c23fa8debd67" [ 1245.883922] env[65758]: _type = "Task" [ 1245.883922] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.895125] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5292c5f3-fcb0-9e07-b74d-c23fa8debd67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.039023] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661469, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.169926] env[65758]: DEBUG nova.compute.manager [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1246.170204] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1246.171232] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccb8f5f-4fbb-417b-97e7-e415cc1525d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.182243] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.182243] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c19e7ab7-1727-4676-a688-22ad3ab18118 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.188247] env[65758]: DEBUG oslo_vmware.api [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1246.188247] env[65758]: value = "task-4661472" [ 1246.188247] env[65758]: _type = "Task" [ 1246.188247] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.197981] env[65758]: DEBUG oslo_vmware.api [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661472, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.249277] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661470, 'name': ReconfigVM_Task, 'duration_secs': 0.58013} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.249565] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Reconfigured VM instance instance-00000075 to attach disk [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742/e5b042e0-3dba-4bfe-9e4d-1d55bcb72742.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1246.250288] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b21b26ca-be63-4086-86bd-8650c4595cbf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.259055] env[65758]: DEBUG nova.compute.utils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1246.260829] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1246.260829] env[65758]: value = "task-4661474" [ 1246.260829] env[65758]: _type = "Task" [ 1246.260829] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.261387] env[65758]: DEBUG nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1246.261643] env[65758]: DEBUG nova.network.neutron [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1246.261993] env[65758]: WARNING neutronclient.v2_0.client [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1246.262396] env[65758]: WARNING neutronclient.v2_0.client [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1246.263128] env[65758]: WARNING openstack [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1246.263496] env[65758]: WARNING openstack [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1246.283387] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661474, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.331781] env[65758]: DEBUG nova.policy [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc85d2d1d84f4df0b4de5e6388bb9398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82bfbb5ee6714c9aa5119cb714d28ce2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1246.357124] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661471, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.396110] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5292c5f3-fcb0-9e07-b74d-c23fa8debd67, 'name': SearchDatastore_Task, 'duration_secs': 0.052692} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.396429] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.396725] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e/45c9678b-0478-4192-8684-3b6fb0f4831e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1246.397130] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf4414cc-0ed0-409e-a404-97de3f28e2d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.406360] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1246.406360] env[65758]: value = "task-4661475" [ 1246.406360] env[65758]: _type = "Task" [ 1246.406360] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.417552] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661475, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.538362] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661469, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.667537] env[65758]: DEBUG nova.network.neutron [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Successfully created port: 61a8c45c-30ac-46ef-869c-09bcc14a67df {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1246.701106] env[65758]: DEBUG oslo_vmware.api [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661472, 'name': PowerOffVM_Task, 'duration_secs': 0.269631} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.701415] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1246.701594] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1246.701868] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29e271a4-be59-413d-8a50-ca7c194e1dbb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.762782] env[65758]: DEBUG nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1246.780772] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1246.781191] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1246.782721] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleting the datastore file [datastore2] 1f773924-74ee-4151-81ba-d105ce225289 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1246.785966] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-382c0a3c-6a83-4d3b-a5c7-cef166a3ba9d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.788321] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661474, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.798510] env[65758]: DEBUG oslo_vmware.api [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for the task: (returnval){ [ 1246.798510] env[65758]: value = "task-4661477" [ 1246.798510] env[65758]: _type = "Task" [ 1246.798510] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.813921] env[65758]: DEBUG oslo_vmware.api [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661477, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.862469] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661471, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.926356] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661475, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.044696] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661469, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.284896] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661474, 'name': Rename_Task, 'duration_secs': 0.744324} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.285335] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1247.285689] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a247effc-94ab-4cb2-be6a-1cd0d9f4ccd9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.295931] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1247.295931] env[65758]: value = "task-4661478" [ 1247.295931] env[65758]: _type = "Task" [ 1247.295931] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.309941] env[65758]: DEBUG oslo_vmware.api [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661477, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.314279] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661478, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.359652] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661471, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.90719} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.360155] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b/2bfca515-f4cb-4781-8423-aebf9477a69b.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1247.360155] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1247.360460] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af899738-29fd-44c0-9484-d2465ec3231e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.371121] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1247.371121] env[65758]: value = "task-4661479" [ 1247.371121] env[65758]: _type = "Task" [ 1247.371121] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.386516] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661479, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.420674] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661475, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.542644] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661469, 'name': CopyVirtualDisk_Task} progress is 91%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.774017] env[65758]: DEBUG nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1247.807834] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1247.808136] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1247.808363] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1247.808464] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1247.808611] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1247.808751] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1247.808964] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1247.809213] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 
tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1247.809567] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1247.809774] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1247.809953] env[65758]: DEBUG nova.virt.hardware [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1247.810902] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5583bacf-6990-433a-bf17-342a48632606 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.817978] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661478, 'name': PowerOnVM_Task} progress is 88%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.827337] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bc293b-e3b2-4c29-9aa4-c4733aaf0cb5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.832187] env[65758]: DEBUG oslo_vmware.api [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661477, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.882404] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661479, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112561} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.882701] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1247.883693] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d2769c-1d3b-430a-b9b0-027e5d27e23e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.918630] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b/2bfca515-f4cb-4781-8423-aebf9477a69b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1247.919387] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-705cb5f5-fe5c-4dce-9eeb-ad12e561d694 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.960072] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661475, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.961983] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1247.961983] env[65758]: value = "task-4661480" [ 1247.961983] env[65758]: _type = "Task" [ 1247.961983] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.972463] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661480, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.042545] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661469, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.729152} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.042821] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/81bd8cd1-7783-4705-af00-19222b217ece/81bd8cd1-7783-4705-af00-19222b217ece.vmdk to [datastore1] bc10286b-195f-48a2-b16c-f8f925ec7a2a/bc10286b-195f-48a2-b16c-f8f925ec7a2a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1248.043693] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d650caf0-86cd-4157-9e42-40f8ff3bb59c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.069021] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] bc10286b-195f-48a2-b16c-f8f925ec7a2a/bc10286b-195f-48a2-b16c-f8f925ec7a2a.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.069393] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b12d62f-e983-42dd-8d68-b634396ccfcf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.091703] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1248.091703] env[65758]: value = "task-4661481" [ 1248.091703] env[65758]: _type = "Task" [ 1248.091703] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.102155] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661481, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.222310] env[65758]: DEBUG nova.compute.manager [req-73ddee7c-e933-47df-910c-7a289f9da1ea req-b0a124c6-fa03-475f-ba7c-9b662932efc7 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Received event network-vif-plugged-61a8c45c-30ac-46ef-869c-09bcc14a67df {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1248.222598] env[65758]: DEBUG oslo_concurrency.lockutils [req-73ddee7c-e933-47df-910c-7a289f9da1ea req-b0a124c6-fa03-475f-ba7c-9b662932efc7 service nova] Acquiring lock "c97f02fc-a244-40e9-97b3-8cbbf516607a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.222751] env[65758]: DEBUG oslo_concurrency.lockutils [req-73ddee7c-e933-47df-910c-7a289f9da1ea req-b0a124c6-fa03-475f-ba7c-9b662932efc7 service nova] Lock "c97f02fc-a244-40e9-97b3-8cbbf516607a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.222920] env[65758]: DEBUG oslo_concurrency.lockutils [req-73ddee7c-e933-47df-910c-7a289f9da1ea req-b0a124c6-fa03-475f-ba7c-9b662932efc7 service nova] Lock "c97f02fc-a244-40e9-97b3-8cbbf516607a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.223182] env[65758]: DEBUG nova.compute.manager [req-73ddee7c-e933-47df-910c-7a289f9da1ea req-b0a124c6-fa03-475f-ba7c-9b662932efc7 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] No waiting events found dispatching network-vif-plugged-61a8c45c-30ac-46ef-869c-09bcc14a67df {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1248.223397] env[65758]: WARNING nova.compute.manager [req-73ddee7c-e933-47df-910c-7a289f9da1ea req-b0a124c6-fa03-475f-ba7c-9b662932efc7 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Received unexpected event network-vif-plugged-61a8c45c-30ac-46ef-869c-09bcc14a67df for instance with vm_state building and task_state spawning. [ 1248.299816] env[65758]: DEBUG nova.network.neutron [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Successfully updated port: 61a8c45c-30ac-46ef-869c-09bcc14a67df {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1248.320928] env[65758]: DEBUG oslo_vmware.api [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Task: {'id': task-4661477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.147592} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.325495] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1248.325758] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1248.325954] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1248.326241] env[65758]: INFO nova.compute.manager [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Took 2.16 seconds to destroy the instance on the hypervisor. [ 1248.326532] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1248.327196] env[65758]: DEBUG oslo_vmware.api [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661478, 'name': PowerOnVM_Task, 'duration_secs': 0.757609} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.327420] env[65758]: DEBUG nova.compute.manager [-] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1248.327558] env[65758]: DEBUG nova.network.neutron [-] [instance: 1f773924-74ee-4151-81ba-d105ce225289] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1248.327902] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
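The "<uuid>-events" lock traffic and the "Received unexpected event network-vif-plugged-..." warning above reflect Nova's external-instance-event handshake: the spawn path registers interest in a named event, and Neutron's notification either wakes a registered waiter or is logged as unexpected (here the port was plugged before the driver started waiting). Below is a simplified, self-contained sketch of that pattern, not Nova's actual InstanceEvents class.

import threading
from collections import defaultdict

class InstanceEventWaiter:
    def __init__(self):
        self._lock = threading.Lock()        # plays the role of the "<uuid>-events" lock
        self._waiters = defaultdict(dict)    # instance_uuid -> {event_key: threading.Event}

    def prepare(self, instance_uuid, event_key):
        # Called by the spawn path before the VIF is plugged.
        with self._lock:
            ev = threading.Event()
            self._waiters[instance_uuid][event_key] = ev
            return ev

    def dispatch(self, instance_uuid, event_key):
        # Called by the external-event handler when Neutron reports the port is up.
        with self._lock:
            ev = self._waiters[instance_uuid].pop(event_key, None)
        if ev is None:
            # Analogous to the WARNING "Received unexpected event ..." above.
            print('unexpected event %s for %s' % (event_key, instance_uuid))
        else:
            ev.set()

A spawning thread would call prepare(uuid, 'network-vif-plugged-' + port_id) and then wait on the returned Event with a timeout, while the event handler calls dispatch with the same key.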
[ 1248.328586] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1248.328917] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1248.336967] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1248.337266] env[65758]: INFO nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Took 9.71 seconds to spawn the instance on the hypervisor. [ 1248.337478] env[65758]: DEBUG nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1248.339280] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a93ee4-624a-4617-a5c1-d43e70992085 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.374514] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1248.418336] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1248.418590] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910093', 'volume_id': 'd86bceca-2f54-4f80-89ad-662fb3a8104a', 'name': 'volume-d86bceca-2f54-4f80-89ad-662fb3a8104a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9550f72-009c-4143-afe2-887727e5c071', 'attached_at': '', 'detached_at': '', 'volume_id': 'd86bceca-2f54-4f80-89ad-662fb3a8104a', 'serial': 'd86bceca-2f54-4f80-89ad-662fb3a8104a'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1248.419528] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c8991f-7267-4256-8950-eedb7fcbe832 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.444267] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254acb21-6ed7-48e2-9854-d51e7e51131a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.447414] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661475, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.68353} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.447710] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e/45c9678b-0478-4192-8684-3b6fb0f4831e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1248.447926] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1248.448667] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b2fc692-dfed-4bd0-a3fa-817c2ca52709 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.476561] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] volume-d86bceca-2f54-4f80-89ad-662fb3a8104a/volume-d86bceca-2f54-4f80-89ad-662fb3a8104a.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.481092] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-253cc77a-7d74-44d0-8fcd-0f47162d0c3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.494014] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1248.494014] env[65758]: value = "task-4661482" [ 1248.494014] env[65758]: _type = "Task" [ 1248.494014] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.501584] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661480, 'name': ReconfigVM_Task, 'duration_secs': 0.36196} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.503421] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b/2bfca515-f4cb-4781-8423-aebf9477a69b.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1248.504185] env[65758]: DEBUG oslo_vmware.api [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1248.504185] env[65758]: value = "task-4661483" [ 1248.504185] env[65758]: _type = "Task" [ 1248.504185] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.504496] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4197d6cc-10f6-48a5-8f4d-a7132fd947fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.513971] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661482, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.522027] env[65758]: DEBUG oslo_vmware.api [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661483, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.522448] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1248.522448] env[65758]: value = "task-4661484" [ 1248.522448] env[65758]: _type = "Task" [ 1248.522448] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.534470] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661484, 'name': Rename_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.602344] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661481, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.802545] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "refresh_cache-c97f02fc-a244-40e9-97b3-8cbbf516607a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.802755] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "refresh_cache-c97f02fc-a244-40e9-97b3-8cbbf516607a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.802925] env[65758]: DEBUG nova.network.neutron [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1248.824954] env[65758]: DEBUG nova.compute.manager [req-48dc2679-71a4-4b2a-b071-8c45f45c84ae req-b0fd480d-8eca-44a0-b173-3056c7303b52 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Received event network-vif-deleted-084d10cd-9734-4baf-91b3-892d54084a42 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1248.824954] env[65758]: INFO nova.compute.manager [req-48dc2679-71a4-4b2a-b071-8c45f45c84ae req-b0fd480d-8eca-44a0-b173-3056c7303b52 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Neutron deleted interface 084d10cd-9734-4baf-91b3-892d54084a42; detaching it from the instance and deleting it from the info cache [ 1248.824954] env[65758]: DEBUG nova.network.neutron [req-48dc2679-71a4-4b2a-b071-8c45f45c84ae req-b0fd480d-8eca-44a0-b173-3056c7303b52 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1248.862345] env[65758]: INFO nova.compute.manager [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Took 16.28 seconds to build instance. [ 1249.008265] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661482, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076063} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.013021] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1249.013021] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e53303e-f191-4bec-bc41-a4bf684020ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.029751] env[65758]: DEBUG oslo_vmware.api [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661483, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.040063] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e/45c9678b-0478-4192-8684-3b6fb0f4831e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1249.043441] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9907ac6-7f34-44e0-b743-c873b39a2034 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.064612] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661484, 'name': Rename_Task, 'duration_secs': 0.16937} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.066152] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1249.066607] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1249.066607] env[65758]: value = "task-4661485" [ 1249.066607] env[65758]: _type = "Task" [ 1249.066607] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.066822] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e679b30b-2ede-41c9-94b9-d031c1e44cce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.078264] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.079737] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1249.079737] env[65758]: value = "task-4661486" [ 1249.079737] env[65758]: _type = "Task" [ 1249.079737] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.088840] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661486, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.104529] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661481, 'name': ReconfigVM_Task, 'duration_secs': 0.948145} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.104836] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Reconfigured VM instance instance-0000006b to attach disk [datastore1] bc10286b-195f-48a2-b16c-f8f925ec7a2a/bc10286b-195f-48a2-b16c-f8f925ec7a2a.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.105607] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33f8aae5-2d71-488b-a0ca-7d8f35831864 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.113465] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1249.113465] env[65758]: value = "task-4661487" [ 1249.113465] env[65758]: _type = "Task" [ 1249.113465] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.123408] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661487, 'name': Rename_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.305392] env[65758]: DEBUG nova.network.neutron [-] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1249.308098] env[65758]: WARNING openstack [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1249.308522] env[65758]: WARNING openstack [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1249.328665] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d00093a-1293-439f-9aca-b555f7b25303 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.344254] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebc1c1b-4550-45d7-ad90-03c7d9b1ae55 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.356632] env[65758]: DEBUG nova.network.neutron [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1249.366016] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5b175b72-2f23-432a-9c0a-343d604caed3 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.795s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.382776] env[65758]: DEBUG nova.compute.manager [req-48dc2679-71a4-4b2a-b071-8c45f45c84ae req-b0fd480d-8eca-44a0-b173-3056c7303b52 service nova] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Detach interface failed, port_id=084d10cd-9734-4baf-91b3-892d54084a42, reason: Instance 1f773924-74ee-4151-81ba-d105ce225289 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1249.454156] env[65758]: WARNING neutronclient.v2_0.client [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
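The Acquiring/Acquired/Releasing records for lock "refresh_cache-<instance_uuid>" above come from oslo.concurrency's named-lock helper, which Nova uses so that only one worker rebuilds a given instance's network info cache at a time. A minimal usage sketch follows, assuming oslo.concurrency is installed; build_cache is a hypothetical callable standing in for the Neutron queries.

from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, build_cache):
    # lockutils.lock() is a context manager around a named lock; entering and
    # leaving it produces DEBUG acquire/release lines like those in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return build_cache(instance_uuid)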
[ 1249.454156] env[65758]: WARNING openstack [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1249.454156] env[65758]: WARNING openstack [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1249.519717] env[65758]: DEBUG oslo_vmware.api [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661483, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.579275] env[65758]: DEBUG nova.network.neutron [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Updating instance_info_cache with network_info: [{"id": "61a8c45c-30ac-46ef-869c-09bcc14a67df", "address": "fa:16:3e:e5:62:ef", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a8c45c-30", "ovs_interfaceid": "61a8c45c-30ac-46ef-869c-09bcc14a67df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1249.588378] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.595181] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661486, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.627022] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661487, 'name': Rename_Task, 'duration_secs': 0.371499} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.627022] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1249.627022] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b646512-fa50-4b03-ad7c-892a08e83f46 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.632803] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1249.632803] env[65758]: value = "task-4661488" [ 1249.632803] env[65758]: _type = "Task" [ 1249.632803] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.643040] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.822045] env[65758]: INFO nova.compute.manager [-] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Took 1.49 seconds to deallocate network for instance. [ 1250.020508] env[65758]: DEBUG oslo_vmware.api [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661483, 'name': ReconfigVM_Task, 'duration_secs': 1.043266} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.020914] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Reconfigured VM instance instance-00000070 to attach disk [datastore2] volume-d86bceca-2f54-4f80-89ad-662fb3a8104a/volume-d86bceca-2f54-4f80-89ad-662fb3a8104a.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1250.025910] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-499c842c-04f8-4af9-bc4c-d3d455c67207 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.044177] env[65758]: DEBUG oslo_vmware.api [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1250.044177] env[65758]: value = "task-4661489" [ 1250.044177] env[65758]: _type = "Task" [ 1250.044177] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.055540] env[65758]: DEBUG oslo_vmware.api [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661489, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.079351] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661485, 'name': ReconfigVM_Task, 'duration_secs': 0.576263} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.079606] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e/45c9678b-0478-4192-8684-3b6fb0f4831e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1250.080274] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3648f9e1-b96e-4a0e-b1d6-d69240226c6f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.094042] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "refresh_cache-c97f02fc-a244-40e9-97b3-8cbbf516607a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.094498] env[65758]: DEBUG nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Instance network_info: |[{"id": "61a8c45c-30ac-46ef-869c-09bcc14a67df", "address": "fa:16:3e:e5:62:ef", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a8c45c-30", "ovs_interfaceid": "61a8c45c-30ac-46ef-869c-09bcc14a67df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1250.094831] env[65758]: DEBUG oslo_vmware.api [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661486, 'name': PowerOnVM_Task, 'duration_secs': 0.647988} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.096357] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:62:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61a8c45c-30ac-46ef-869c-09bcc14a67df', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1250.104083] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1250.104490] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1250.104490] env[65758]: INFO nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Took 9.69 seconds to spawn the instance on the hypervisor. [ 1250.104669] env[65758]: DEBUG nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1250.104997] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1250.104997] env[65758]: value = "task-4661490" [ 1250.104997] env[65758]: _type = "Task" [ 1250.104997] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.105236] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1250.105938] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0550db4e-3838-4ff0-844a-af3910928dee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.108624] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d034c3d8-8d45-4448-84be-2bed52e8e506 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.139390] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1250.139390] env[65758]: value = "task-4661491" [ 1250.139390] env[65758]: _type = "Task" [ 1250.139390] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.139390] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661490, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.152336] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661488, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.156151] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661491, 'name': CreateVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.258609] env[65758]: DEBUG nova.compute.manager [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Received event network-changed-61a8c45c-30ac-46ef-869c-09bcc14a67df {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1250.258609] env[65758]: DEBUG nova.compute.manager [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Refreshing instance network info cache due to event network-changed-61a8c45c-30ac-46ef-869c-09bcc14a67df. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1250.258609] env[65758]: DEBUG oslo_concurrency.lockutils [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] Acquiring lock "refresh_cache-c97f02fc-a244-40e9-97b3-8cbbf516607a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.259052] env[65758]: DEBUG oslo_concurrency.lockutils [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] Acquired lock "refresh_cache-c97f02fc-a244-40e9-97b3-8cbbf516607a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.259052] env[65758]: DEBUG nova.network.neutron [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Refreshing network info cache for port 61a8c45c-30ac-46ef-869c-09bcc14a67df {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1250.330062] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.330434] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.330777] env[65758]: DEBUG nova.objects.instance [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lazy-loading 'resources' on Instance uuid 1f773924-74ee-4151-81ba-d105ce225289 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.478511] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.555350] env[65758]: DEBUG oslo_vmware.api [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661489, 'name': ReconfigVM_Task, 'duration_secs': 0.192064} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.555705] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910093', 'volume_id': 'd86bceca-2f54-4f80-89ad-662fb3a8104a', 'name': 'volume-d86bceca-2f54-4f80-89ad-662fb3a8104a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9550f72-009c-4143-afe2-887727e5c071', 'attached_at': '', 'detached_at': '', 'volume_id': 'd86bceca-2f54-4f80-89ad-662fb3a8104a', 'serial': 'd86bceca-2f54-4f80-89ad-662fb3a8104a'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1250.620281] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661490, 'name': Rename_Task, 'duration_secs': 0.152613} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.620512] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1250.620820] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6e36f55-14c2-40e1-a9dc-d54a3c61afea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.628995] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1250.628995] env[65758]: value = "task-4661492" [ 1250.628995] env[65758]: _type = "Task" [ 1250.628995] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.643423] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.655042] env[65758]: INFO nova.compute.manager [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Took 15.57 seconds to build instance. [ 1250.661149] env[65758]: DEBUG oslo_vmware.api [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661488, 'name': PowerOnVM_Task, 'duration_secs': 0.760045} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.664506] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1250.666931] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661491, 'name': CreateVM_Task, 'duration_secs': 0.522629} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.667338] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1250.668182] env[65758]: WARNING neutronclient.v2_0.client [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1250.668818] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.668818] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.668999] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1250.669362] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-163d248b-fb4d-4719-b727-3f9d4d415219 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.674662] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1250.674662] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52bcf16f-b90a-5df6-38d5-4f0397728866" [ 1250.674662] env[65758]: _type = "Task" [ 1250.674662] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.685911] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bcf16f-b90a-5df6-38d5-4f0397728866, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.762919] env[65758]: WARNING neutronclient.v2_0.client [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1250.763388] env[65758]: WARNING openstack [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1250.763913] env[65758]: WARNING openstack [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1250.772595] env[65758]: DEBUG nova.compute.manager [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1250.773625] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89a227c-356b-4c53-95e8-1d35c76388f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.854785] env[65758]: DEBUG nova.compute.manager [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received event network-changed-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1250.854993] env[65758]: DEBUG nova.compute.manager [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Refreshing instance network info cache due to event network-changed-61227a3e-82c2-4ebf-b71b-b953b5667f90. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1250.855614] env[65758]: DEBUG oslo_concurrency.lockutils [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] Acquiring lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.855776] env[65758]: DEBUG oslo_concurrency.lockutils [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] Acquired lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.855943] env[65758]: DEBUG nova.network.neutron [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Refreshing network info cache for port 61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1250.960800] env[65758]: WARNING neutronclient.v2_0.client [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1250.961489] env[65758]: WARNING openstack [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1250.961841] env[65758]: WARNING openstack [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1250.982403] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.010020] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de7dc51-585d-412e-baff-679276b74f58 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.023735] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2532d007-4af4-4322-a0b1-c4e2feca11ee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.056161] env[65758]: INFO nova.compute.manager [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Rescuing [ 1251.056730] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.057129] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.057502] env[65758]: DEBUG nova.network.neutron [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1251.061753] env[65758]: DEBUG nova.network.neutron [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Updated VIF entry in instance network info cache for port 61a8c45c-30ac-46ef-869c-09bcc14a67df. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1251.062237] env[65758]: DEBUG nova.network.neutron [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Updating instance_info_cache with network_info: [{"id": "61a8c45c-30ac-46ef-869c-09bcc14a67df", "address": "fa:16:3e:e5:62:ef", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a8c45c-30", "ovs_interfaceid": "61a8c45c-30ac-46ef-869c-09bcc14a67df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1251.066729] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd3a4f5-a677-4dcd-b9a9-c59c1b487ac0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.076428] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6017e86-615c-4e71-b154-b49bd2d00ebe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.094072] env[65758]: DEBUG nova.compute.provider_tree [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.139281] env[65758]: DEBUG oslo_vmware.api [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661492, 'name': PowerOnVM_Task, 'duration_secs': 0.48375} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.139553] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1251.139762] env[65758]: INFO nova.compute.manager [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Took 7.90 seconds to spawn the instance on the hypervisor. [ 1251.139945] env[65758]: DEBUG nova.compute.manager [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1251.140748] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522af444-37a9-4447-ad1d-6d064d0135c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.156889] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6893b42d-1688-476f-a526-77e9bea20065 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "2bfca515-f4cb-4781-8423-aebf9477a69b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.085s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.186099] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52bcf16f-b90a-5df6-38d5-4f0397728866, 'name': SearchDatastore_Task, 'duration_secs': 0.011873} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.186514] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.186776] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1251.187017] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.187320] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.187320] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1251.187629] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c41712e0-eefe-4cfb-a8c4-9adac9699651 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.197906] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1251.198108] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1251.198901] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-969a9a0a-286e-43ac-98d1-1975cd899a54 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.205556] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1251.205556] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]529941d9-66ed-bc46-2695-ebb26792c813" [ 1251.205556] env[65758]: _type = "Task" [ 1251.205556] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.213554] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529941d9-66ed-bc46-2695-ebb26792c813, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.291397] env[65758]: DEBUG oslo_concurrency.lockutils [None req-f12924c0-1515-4649-b75c-ea3f35400ef7 tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.123s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.358922] env[65758]: WARNING neutronclient.v2_0.client [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1251.359606] env[65758]: WARNING openstack [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1251.360050] env[65758]: WARNING openstack [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1251.527631] env[65758]: WARNING neutronclient.v2_0.client [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1251.528293] env[65758]: WARNING openstack [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1251.528645] env[65758]: WARNING openstack [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1251.573931] env[65758]: WARNING neutronclient.v2_0.client [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1251.574603] env[65758]: WARNING openstack [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1251.575064] env[65758]: WARNING openstack [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1251.581761] env[65758]: DEBUG oslo_concurrency.lockutils [req-d989d141-9012-491c-8b65-2fb074d353bb req-3008036d-92d0-456b-a8d2-ec0f26327a19 service nova] Releasing lock "refresh_cache-c97f02fc-a244-40e9-97b3-8cbbf516607a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.597338] env[65758]: DEBUG nova.scheduler.client.report [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1251.613495] env[65758]: DEBUG nova.objects.instance [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'flavor' on Instance uuid a9550f72-009c-4143-afe2-887727e5c071 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.626883] 
env[65758]: DEBUG nova.network.neutron [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updated VIF entry in instance network info cache for port 61227a3e-82c2-4ebf-b71b-b953b5667f90. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1251.627269] env[65758]: DEBUG nova.network.neutron [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [{"id": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "address": "fa:16:3e:45:e5:9e", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61227a3e-82", "ovs_interfaceid": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1251.661359] env[65758]: INFO nova.compute.manager [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Took 12.79 seconds to build instance. [ 1251.716684] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]529941d9-66ed-bc46-2695-ebb26792c813, 'name': SearchDatastore_Task, 'duration_secs': 0.025866} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.717525] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b07b1fa4-7aa3-42ae-9e61-bc93637d5b30 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.722588] env[65758]: WARNING neutronclient.v2_0.client [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1251.723244] env[65758]: WARNING openstack [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1251.723608] env[65758]: WARNING openstack [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1251.732428] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1251.732428] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a2b0aa-93f4-ae8f-8bfb-2cf5e8a5f7b1" [ 1251.732428] env[65758]: _type = "Task" [ 1251.732428] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.741586] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a2b0aa-93f4-ae8f-8bfb-2cf5e8a5f7b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.814311] env[65758]: DEBUG nova.network.neutron [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Updating instance_info_cache with network_info: [{"id": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "address": "fa:16:3e:71:5a:a4", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf268cd74-fb", "ovs_interfaceid": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1252.106219] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 
tempest-ServerActionsTestJSON-1163233672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.108589] env[65758]: INFO nova.compute.manager [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Rebuilding instance [ 1252.110663] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.128s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.110847] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.111216] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1252.111853] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a348812-b115-4556-a451-c7fabc9428ad {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.121424] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e44c03d0-cd1f-4986-a25d-cf99d8eb3387 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.326s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.123337] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50913a25-478f-47b6-ae83-471cb5832371 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.130511] env[65758]: DEBUG oslo_concurrency.lockutils [req-6bbb1fcd-ba2b-478e-a5de-466082326462 req-3f29c41e-04a4-45c4-a3b3-87c86a15018b service nova] Releasing lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.140323] env[65758]: INFO nova.scheduler.client.report [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Deleted allocations for instance 1f773924-74ee-4151-81ba-d105ce225289 [ 1252.143617] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722f5237-05eb-45c5-8a3b-e14463cf6e38 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.158941] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-598ab2dc-3c65-4f85-9e9d-7b355d95f74c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.166241] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1a348737-6c37-4dca-a09d-5f34d5272ef4 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "45c9678b-0478-4192-8684-3b6fb0f4831e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.306s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.194699] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179268MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1252.194950] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.195251] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.201517] env[65758]: DEBUG nova.compute.manager [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1252.202452] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99a8b74-e9f9-409b-9c86-f091104aebc0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.244872] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a2b0aa-93f4-ae8f-8bfb-2cf5e8a5f7b1, 'name': SearchDatastore_Task, 'duration_secs': 0.021313} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.245167] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.245458] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] c97f02fc-a244-40e9-97b3-8cbbf516607a/c97f02fc-a244-40e9-97b3-8cbbf516607a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1252.245731] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-967e8437-766b-4b62-aa8a-cd97be13c94e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.254140] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1252.254140] env[65758]: value = "task-4661493" [ 1252.254140] env[65758]: _type = "Task" [ 1252.254140] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.264015] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661493, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.317222] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.653466] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c5c4bba2-9bfc-417d-9f47-d4e73f908b21 tempest-ServerActionsTestJSON-1163233672 tempest-ServerActionsTestJSON-1163233672-project-member] Lock "1f773924-74ee-4151-81ba-d105ce225289" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.992s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.768758] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661493, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.218151] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.218151] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-813d8fa8-0380-4235-a4ae-2137923d42a8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.225471] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1253.225471] env[65758]: value = "task-4661494" [ 1253.225471] env[65758]: _type = "Task" [ 1253.225471] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.226489] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance cdc1cfab-4f75-4caf-a4ee-8197af083353 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.226629] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a9550f72-009c-4143-afe2-887727e5c071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.226752] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 0f3ae822-4c4c-4dff-94d4-3416187d6d25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.226871] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.227085] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance bc10286b-195f-48a2-b16c-f8f925ec7a2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.227245] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.227412] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 2bfca515-f4cb-4781-8423-aebf9477a69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.227542] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 45c9678b-0478-4192-8684-3b6fb0f4831e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.227739] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance c97f02fc-a244-40e9-97b3-8cbbf516607a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1253.240103] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.265281] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661493, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542519} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.265594] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] c97f02fc-a244-40e9-97b3-8cbbf516607a/c97f02fc-a244-40e9-97b3-8cbbf516607a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1253.265829] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1253.266104] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0dc0e05b-a490-4720-8148-86a38e12c9a7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.274028] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1253.274028] env[65758]: value = "task-4661495" [ 1253.274028] env[65758]: _type = "Task" [ 1253.274028] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.285324] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661495, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.541584] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.541879] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.733492] env[65758]: INFO nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e505f8e8-0612-4fe7-bcd2-73fdd39458fa has allocations against this compute host but is not found in the database. 
[ 1253.733870] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1253.733870] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '9', 'num_vm_active': '7', 'num_task_None': '6', 'num_os_type_None': '9', 'num_proj_e2440f1694fe4b87a9827f6653ff2e4c': '1', 'io_workload': '3', 'num_proj_82bfbb5ee6714c9aa5119cb714d28ce2': '3', 'num_proj_f32b2100e0824c56ab852e0d1bb37e87': '1', 'num_vm_rescued': '1', 'num_proj_0a8729d781b1450e9b366785f96f9938': '2', 'num_proj_4095654557a34bb0907071aedb3bb678': '1', 'num_task_rescuing': '1', 'num_task_rebuilding': '1', 'num_proj_e65a59f8a24c480abf2ec74bcd486000': '1', 'num_vm_building': '1', 'num_task_spawning': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1253.742732] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661494, 'name': PowerOffVM_Task, 'duration_secs': 0.176112} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.743046] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1253.743305] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1253.744125] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0360a62-2b7a-46f1-8fa8-a725e9610a53 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.753287] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1253.753287] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84652d76-2247-4158-8f5d-ae9ccbf263e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.782972] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
1253.783380] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1253.783603] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Deleting the datastore file [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.790539] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05f31dbb-946a-4c5e-8bce-7ab80176c5c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.792772] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069703} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.793066] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1253.794286] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f85be1c-7a9c-488f-b770-639486cc1277 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.798389] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1253.798389] env[65758]: value = "task-4661497" [ 1253.798389] env[65758]: _type = "Task" [ 1253.798389] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.824137] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] c97f02fc-a244-40e9-97b3-8cbbf516607a/c97f02fc-a244-40e9-97b3-8cbbf516607a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1253.827641] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fc92be8-7246-4312-bed3-b4fb249986b5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.851711] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661497, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.858686] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.859073] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1253.859073] env[65758]: value = "task-4661498" [ 1253.859073] env[65758]: _type = "Task" [ 1253.859073] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.859296] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da2b21d7-0a5a-4068-83d2-c8a6273ce0c7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.874166] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661498, 'name': ReconfigVM_Task} progress is 10%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.876291] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1253.876291] env[65758]: value = "task-4661499" [ 1253.876291] env[65758]: _type = "Task" [ 1253.876291] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.886310] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661499, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.979584] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2191c8-c915-42b0-b719-04792ef9ed64 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.988614] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7847ae2c-7baa-41d0-9077-239115045680 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.024828] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc6a4da-f992-464f-a593-b2a874ba481e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.034035] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17093af9-9db5-46b5-9dbb-5462a8961cd9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.048744] env[65758]: DEBUG nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1254.054512] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1254.309265] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097247} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.309686] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.309915] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1254.310121] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1254.371505] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661498, 'name': ReconfigVM_Task, 'duration_secs': 0.360298} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.371786] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Reconfigured VM instance instance-00000078 to attach disk [datastore2] c97f02fc-a244-40e9-97b3-8cbbf516607a/c97f02fc-a244-40e9-97b3-8cbbf516607a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1254.372664] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6874ba52-9869-4561-a928-1bb0f412f9a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.383044] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1254.383044] env[65758]: value = "task-4661500" [ 1254.383044] env[65758]: _type = "Task" [ 1254.383044] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.390456] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661499, 'name': PowerOffVM_Task, 'duration_secs': 0.189767} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.391196] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1254.391926] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a753609-de0a-4f7d-b104-2a1d31c4fd5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.397989] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661500, 'name': Rename_Task} progress is 10%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.415283] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af2f402-be8a-42f2-8f09-098bae2d5a45 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.452016] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1254.452016] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cc3f198-f850-480c-9ca6-5b165d4942be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.461106] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1254.461106] env[65758]: value = "task-4661501" [ 1254.461106] env[65758]: _type = "Task" [ 1254.461106] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.471412] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1254.471640] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1254.471958] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.472139] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.472332] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1254.472609] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-7db9fc2a-4185-41bf-9041-2d9aa1ee5b09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.483356] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1254.483535] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1254.484521] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-440299c3-1a88-4fc4-931a-b99b135e0183 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.490721] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1254.490721] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5268d89d-80c3-ae5d-609f-fdeeae5216dc" [ 1254.490721] env[65758]: _type = "Task" [ 1254.490721] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.500491] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5268d89d-80c3-ae5d-609f-fdeeae5216dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.559843] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1254.581358] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.894845] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661500, 'name': Rename_Task, 'duration_secs': 0.143978} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.895175] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1254.895290] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3fe3893-86b3-4cb3-8efd-edc6cf8b03c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.902392] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1254.902392] env[65758]: value = "task-4661502" [ 1254.902392] env[65758]: _type = "Task" [ 1254.902392] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.912961] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661502, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.003810] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5268d89d-80c3-ae5d-609f-fdeeae5216dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010704} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.004202] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91c69591-25c9-4f40-8e65-21875e9f06ae {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.010422] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1255.010422] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d4b4bf-a8aa-d921-e9a5-d1aa672c273e" [ 1255.010422] env[65758]: _type = "Task" [ 1255.010422] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.019336] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d4b4bf-a8aa-d921-e9a5-d1aa672c273e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.066545] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1255.066829] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.872s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.067127] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.486s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.069105] env[65758]: INFO nova.compute.claims [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1255.353896] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1255.354262] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1255.354332] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1255.354502] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1255.354651] env[65758]: DEBUG nova.virt.hardware [None 
req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1255.354800] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1255.354997] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1255.355169] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1255.355374] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1255.355585] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1255.355810] env[65758]: DEBUG nova.virt.hardware [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1255.356790] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12758cd7-67b2-409b-9aea-322798eb3ca8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.366039] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81132cae-3368-4d0b-ad10-6460eb405c54 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.381426] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1255.387270] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1255.387606] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1255.387851] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d02f80e9-abb3-41ee-abf8-68968c6ebb09 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.408147] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1255.408147] env[65758]: value = "task-4661503" [ 1255.408147] env[65758]: _type = "Task" [ 1255.408147] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.414164] env[65758]: DEBUG oslo_vmware.api [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661502, 'name': PowerOnVM_Task, 'duration_secs': 0.490278} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.414845] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1255.415096] env[65758]: INFO nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Took 7.64 seconds to spawn the instance on the hypervisor. [ 1255.415329] env[65758]: DEBUG nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1255.416102] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5faab133-4ed1-4064-9d2b-be33a0dbf39b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.421528] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661503, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.522905] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d4b4bf-a8aa-d921-e9a5-d1aa672c273e, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.523253] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.523472] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. {{(pid=65758) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1255.523752] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ab3bd93-c5d7-4fe6-a7bc-cc6f9d3a5864 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.531445] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1255.531445] env[65758]: value = "task-4661504" [ 1255.531445] env[65758]: _type = "Task" [ 1255.531445] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.541115] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661504, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.922811] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661503, 'name': CreateVM_Task, 'duration_secs': 0.341542} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.923503] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1255.923690] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.923936] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.924407] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1255.924826] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a757a6e0-71cb-43ee-be8f-753a31b82572 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.937616] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1255.937616] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a3ff37-f87f-0bd7-60ae-27861975891d" [ 1255.937616] env[65758]: _type = "Task" [ 1255.937616] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.943651] env[65758]: INFO nova.compute.manager [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Took 12.46 seconds to build instance. [ 1255.952411] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a3ff37-f87f-0bd7-60ae-27861975891d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.055491] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661504, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.072254] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.072495] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.072644] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.072790] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.072930] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.073084] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.073239] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.073388] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1256.229098] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1e5cf4-b8a8-46af-b847-6e6f889cde3a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.237575] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9960caea-7d90-4466-ace3-3c40c9e3ec8a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.269301] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c3f742-efd4-47a7-b5b6-08242bc10afc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.278050] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be98d35-b424-471d-8734-1943ef842879 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.293350] env[65758]: DEBUG nova.compute.provider_tree [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1256.445918] env[65758]: DEBUG oslo_concurrency.lockutils [None req-46d5d94a-abe6-4917-bc16-e21df0511ee3 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "c97f02fc-a244-40e9-97b3-8cbbf516607a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.969s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.450886] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a3ff37-f87f-0bd7-60ae-27861975891d, 'name': SearchDatastore_Task, 'duration_secs': 0.055544} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.451200] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.451427] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1256.451655] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.451795] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.451966] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1256.452239] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edb2cdf9-a499-45a3-b886-326cf4912f47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.461110] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1256.461293] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1256.461981] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4404c533-f72c-489f-94ab-c4aadd5bdae7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.467484] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1256.467484] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]523228ff-d647-4405-7358-7ac07f680fc5" [ 1256.467484] env[65758]: _type = "Task" [ 1256.467484] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.475427] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523228ff-d647-4405-7358-7ac07f680fc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.552816] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661504, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530077} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.553143] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk. 
[ 1256.553956] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689e520f-430a-4627-8427-eae93e8cc2db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.579884] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.580231] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e605b73d-4a9c-4990-a995-f11a665ee323 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.599074] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1256.599074] env[65758]: value = "task-4661505" [ 1256.599074] env[65758]: _type = "Task" [ 1256.599074] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.608524] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661505, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.816187] env[65758]: ERROR nova.scheduler.client.report [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [req-a549f103-6d56-4e31-b9ae-a0a19330677a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a549f103-6d56-4e31-b9ae-a0a19330677a"}]} [ 1256.834082] env[65758]: DEBUG nova.scheduler.client.report [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Refreshing inventories for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1256.848899] env[65758]: DEBUG nova.scheduler.client.report [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Updating ProviderTree inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1256.849143] env[65758]: DEBUG nova.compute.provider_tree [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1256.862103] env[65758]: DEBUG nova.scheduler.client.report [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Refreshing aggregate associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, aggregates: None {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1256.882836] env[65758]: DEBUG nova.scheduler.client.report [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Refreshing trait associations for resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=65758) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1256.981543] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]523228ff-d647-4405-7358-7ac07f680fc5, 'name': SearchDatastore_Task, 'duration_secs': 0.008971} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.982387] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f277ca1c-11a2-48b8-970f-aea386680e82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.990469] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1256.990469] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520fb890-6efc-d358-2503-741a348c1d53" [ 1256.990469] env[65758]: _type = "Task" [ 1256.990469] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.000793] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520fb890-6efc-d358-2503-741a348c1d53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.026269] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874a3e5b-e6c3-426b-bfd1-bbcd1a91bb00 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.036273] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c4568c-5d0b-432d-beda-bf11d1c60e24 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.071829] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6429fc-7b8f-4a34-a7ea-29dd929170bd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.080185] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1eeb85-b546-4730-ab85-aa4ca6b5e21b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.095442] env[65758]: DEBUG nova.compute.provider_tree [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1257.109268] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661505, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.219126] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "c97f02fc-a244-40e9-97b3-8cbbf516607a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.219405] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "c97f02fc-a244-40e9-97b3-8cbbf516607a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.219619] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "c97f02fc-a244-40e9-97b3-8cbbf516607a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.219800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "c97f02fc-a244-40e9-97b3-8cbbf516607a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.219968] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "c97f02fc-a244-40e9-97b3-8cbbf516607a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.222184] env[65758]: INFO nova.compute.manager [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Terminating instance [ 1257.506384] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520fb890-6efc-d358-2503-741a348c1d53, 'name': SearchDatastore_Task, 'duration_secs': 0.011887} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.506781] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.507304] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e/45c9678b-0478-4192-8684-3b6fb0f4831e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1257.508014] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1da5f1ae-aebd-4f43-b26d-81096d6ea559 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.517099] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1257.517099] env[65758]: value = "task-4661506" [ 1257.517099] env[65758]: _type = "Task" [ 1257.517099] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.529514] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661506, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.612634] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661505, 'name': ReconfigVM_Task, 'duration_secs': 1.012962} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.612951] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b/75a6399b-5100-4c51-b5cf-162bd505a28f-rescue.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.613979] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c372c9-3c01-4914-a54e-e8164f8aee01 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.641247] env[65758]: DEBUG nova.scheduler.client.report [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 170 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1257.641528] env[65758]: DEBUG nova.compute.provider_tree [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 170 to 171 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1257.641720] env[65758]: DEBUG nova.compute.provider_tree [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1257.644895] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adc21bbd-b4f0-4c42-a6e4-8f28b2bb7f32 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.662568] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1257.662568] env[65758]: value = "task-4661507" [ 1257.662568] env[65758]: _type = "Task" [ 1257.662568] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.672582] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661507, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.726370] env[65758]: DEBUG nova.compute.manager [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1257.726707] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1257.727753] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c87295-7a26-4271-a293-b7418d50c05e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.738297] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1257.738606] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4450ead-85d6-4f11-86bc-801823ad9ebe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.747378] env[65758]: DEBUG oslo_vmware.api [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1257.747378] env[65758]: value = "task-4661508" [ 1257.747378] env[65758]: _type = "Task" [ 1257.747378] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.760943] env[65758]: DEBUG oslo_vmware.api [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661508, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.028681] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661506, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491217} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.029081] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e/45c9678b-0478-4192-8684-3b6fb0f4831e.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1258.029274] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1258.029514] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f566f16-9571-4066-9307-2b67f8aeeb31 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.038236] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1258.038236] env[65758]: value = "task-4661509" [ 1258.038236] env[65758]: _type = "Task" [ 1258.038236] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.049642] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661509, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.157307] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.090s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.157855] env[65758]: DEBUG nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1258.172311] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661507, 'name': ReconfigVM_Task, 'duration_secs': 0.208265} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.172512] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1258.172798] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14569340-bea8-4ae6-b3c1-fba788e9a99e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.179952] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1258.179952] env[65758]: value = "task-4661510" [ 1258.179952] env[65758]: _type = "Task" [ 1258.179952] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.189591] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661510, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.259064] env[65758]: DEBUG oslo_vmware.api [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661508, 'name': PowerOffVM_Task, 'duration_secs': 0.219814} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.259064] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1258.259257] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1258.259381] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9af889f-ec82-4bf7-b38d-ca2c84267663 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.334807] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1258.335165] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1258.335471] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleting the datastore file [datastore2] c97f02fc-a244-40e9-97b3-8cbbf516607a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1258.335768] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-857d54d6-c19c-4055-8800-d21783443dd2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.344561] env[65758]: DEBUG oslo_vmware.api [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1258.344561] env[65758]: value = "task-4661512" [ 1258.344561] env[65758]: _type = "Task" [ 1258.344561] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.354465] env[65758]: DEBUG oslo_vmware.api [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.549520] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190123} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.549827] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1258.550773] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ba579a-3590-4f5d-baed-db974c18436a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.576271] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e/45c9678b-0478-4192-8684-3b6fb0f4831e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1258.576692] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7796b8b2-eff8-4e01-8e44-69ad2baa7296 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.598662] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1258.598662] env[65758]: value = "task-4661513" [ 1258.598662] env[65758]: _type = "Task" [ 1258.598662] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.609208] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661513, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.663188] env[65758]: DEBUG nova.compute.utils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1258.665415] env[65758]: DEBUG nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Allocating IP information in the background. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1258.665656] env[65758]: DEBUG nova.network.neutron [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1258.665992] env[65758]: WARNING neutronclient.v2_0.client [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1258.666343] env[65758]: WARNING neutronclient.v2_0.client [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1258.666992] env[65758]: WARNING openstack [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1258.667369] env[65758]: WARNING openstack [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1258.691684] env[65758]: DEBUG oslo_vmware.api [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661510, 'name': PowerOnVM_Task, 'duration_secs': 0.465444} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.691966] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.694772] env[65758]: DEBUG nova.compute.manager [None req-b7b374e1-141d-4509-8d81-c81a28d04ef8 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1258.695642] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5014b2-79a3-4c81-b486-922cd3d97926 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.714574] env[65758]: DEBUG nova.policy [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcb6cf498b804adb971dd7e1722c277b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f32b2100e0824c56ab852e0d1bb37e87', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1258.855724] env[65758]: DEBUG oslo_vmware.api [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162567} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.856014] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1258.856225] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1258.856392] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1258.856568] env[65758]: INFO nova.compute.manager [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1258.856875] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1258.857105] env[65758]: DEBUG nova.compute.manager [-] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1258.857205] env[65758]: DEBUG nova.network.neutron [-] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1258.857449] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1258.858016] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1258.858290] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1258.896926] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1259.121502] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661513, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.161737] env[65758]: DEBUG nova.compute.manager [req-1ba158b3-8f8f-4471-b8ca-6017e87564ea req-1757640e-ae33-4528-8df2-ab6568f33979 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Received event network-vif-deleted-61a8c45c-30ac-46ef-869c-09bcc14a67df {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1259.162208] env[65758]: INFO nova.compute.manager [req-1ba158b3-8f8f-4471-b8ca-6017e87564ea req-1757640e-ae33-4528-8df2-ab6568f33979 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Neutron deleted interface 61a8c45c-30ac-46ef-869c-09bcc14a67df; detaching it from the instance and deleting it from the info cache [ 1259.164103] env[65758]: DEBUG nova.network.neutron [req-1ba158b3-8f8f-4471-b8ca-6017e87564ea req-1757640e-ae33-4528-8df2-ab6568f33979 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1259.165927] env[65758]: DEBUG nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1259.441833] env[65758]: DEBUG nova.network.neutron [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Successfully created port: 72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1259.612235] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661513, 'name': ReconfigVM_Task, 'duration_secs': 0.737368} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.612551] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e/45c9678b-0478-4192-8684-3b6fb0f4831e.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1259.613506] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33bd7695-16b1-49e0-85e8-348f199a9dbc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.622212] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1259.622212] env[65758]: value = "task-4661514" [ 1259.622212] env[65758]: _type = "Task" [ 1259.622212] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.625400] env[65758]: DEBUG nova.network.neutron [-] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1259.632458] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661514, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.675869] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c29d50aa-5533-49be-aec3-028834101671 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.691021] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f7c2ce-6350-4ad9-872c-4fe49763b087 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.724042] env[65758]: DEBUG nova.compute.manager [req-1ba158b3-8f8f-4471-b8ca-6017e87564ea req-1757640e-ae33-4528-8df2-ab6568f33979 service nova] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Detach interface failed, port_id=61a8c45c-30ac-46ef-869c-09bcc14a67df, reason: Instance c97f02fc-a244-40e9-97b3-8cbbf516607a could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1260.128099] env[65758]: INFO nova.compute.manager [-] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Took 1.27 seconds to deallocate network for instance. [ 1260.134694] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661514, 'name': Rename_Task, 'duration_secs': 0.341341} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.137519] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1260.137929] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4783cc5c-70fd-4e03-ae52-fc5cd1190f9c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.144349] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1260.144349] env[65758]: value = "task-4661515" [ 1260.144349] env[65758]: _type = "Task" [ 1260.144349] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.152724] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661515, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.182771] env[65758]: DEBUG nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1260.208314] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1260.208614] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1260.208768] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1260.209899] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1260.210120] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1260.210213] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1260.210424] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:577}} [ 1260.210582] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1260.210746] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1260.210988] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1260.211234] env[65758]: DEBUG nova.virt.hardware [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1260.212148] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e7014e-3767-475a-a51a-e3773208318a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.215435] env[65758]: INFO nova.compute.manager [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Unrescuing [ 1260.215671] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.215815] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquired lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1260.216038] env[65758]: DEBUG nova.network.neutron [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1260.223512] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370e2726-5a2d-4a23-81a9-f499c7935887 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.640117] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.640347] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.640474] env[65758]: DEBUG nova.objects.instance [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lazy-loading 'resources' on Instance uuid c97f02fc-a244-40e9-97b3-8cbbf516607a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1260.656225] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661515, 'name': PowerOnVM_Task} progress is 66%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.718649] env[65758]: WARNING neutronclient.v2_0.client [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1260.719447] env[65758]: WARNING openstack [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1260.719812] env[65758]: WARNING openstack [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1260.888255] env[65758]: WARNING neutronclient.v2_0.client [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1260.889134] env[65758]: WARNING openstack [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1260.889486] env[65758]: WARNING openstack [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1260.923967] env[65758]: DEBUG nova.network.neutron [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Successfully updated port: 72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1260.983884] env[65758]: DEBUG nova.network.neutron [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Updating instance_info_cache with network_info: [{"id": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "address": "fa:16:3e:71:5a:a4", "network": {"id": "ce79fd19-09d4-443b-adeb-953966156797", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1292903758-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0a8729d781b1450e9b366785f96f9938", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d22cb4ec-277f-41ee-8aba-b3d54442b93d", "external-id": "nsx-vlan-transportzone-652", "segmentation_id": 652, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf268cd74-fb", "ovs_interfaceid": "f268cd74-fb17-4936-92b2-939e07f2fdfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1261.154924] env[65758]: DEBUG oslo_vmware.api [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661515, 'name': PowerOnVM_Task, 'duration_secs': 1.0074} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.155347] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1261.155525] env[65758]: DEBUG nova.compute.manager [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1261.156298] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ac05d8-a4ab-46bd-a6a7-fce468bbb3b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.188028] env[65758]: DEBUG nova.compute.manager [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Received event network-vif-plugged-72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1261.188028] env[65758]: DEBUG oslo_concurrency.lockutils [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] Acquiring lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.188028] env[65758]: DEBUG oslo_concurrency.lockutils [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] Lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.188028] env[65758]: DEBUG oslo_concurrency.lockutils [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] Lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.188028] env[65758]: DEBUG nova.compute.manager [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] No waiting events found dispatching network-vif-plugged-72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1261.188028] env[65758]: WARNING nova.compute.manager [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Received unexpected event network-vif-plugged-72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 for instance with vm_state building and task_state spawning. 
[ 1261.188028] env[65758]: DEBUG nova.compute.manager [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Received event network-changed-72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1261.188425] env[65758]: DEBUG nova.compute.manager [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Refreshing instance network info cache due to event network-changed-72e1a94b-418a-4f03-a5c3-8876b1d7f3d3. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1261.188425] env[65758]: DEBUG oslo_concurrency.lockutils [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] Acquiring lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.188425] env[65758]: DEBUG oslo_concurrency.lockutils [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] Acquired lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.188678] env[65758]: DEBUG nova.network.neutron [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Refreshing network info cache for port 72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1261.287829] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c449689d-4e1a-4db1-901b-eff0eeb92a76 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.296730] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74a1add-f5e4-41f7-9693-680a3e0d3386 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.331087] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46d6d12-97da-4a06-8716-6168bf2f0b5b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.341010] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bae0e9a-fdc3-48d5-ab0f-2b5f99d0d848 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.357300] env[65758]: DEBUG nova.compute.provider_tree [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.433295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.486965] env[65758]: DEBUG oslo_concurrency.lockutils [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Releasing lock "refresh_cache-2bfca515-f4cb-4781-8423-aebf9477a69b" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.487679] env[65758]: DEBUG nova.objects.instance [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lazy-loading 'flavor' on Instance uuid 2bfca515-f4cb-4781-8423-aebf9477a69b {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1261.674014] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.691678] env[65758]: WARNING neutronclient.v2_0.client [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1261.692440] env[65758]: WARNING openstack [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1261.692829] env[65758]: WARNING openstack [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1261.732013] env[65758]: DEBUG nova.network.neutron [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1261.879243] env[65758]: DEBUG nova.network.neutron [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1261.879243] env[65758]: DEBUG nova.scheduler.client.report [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1261.944030] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "45c9678b-0478-4192-8684-3b6fb0f4831e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.944388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "45c9678b-0478-4192-8684-3b6fb0f4831e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.944550] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "45c9678b-0478-4192-8684-3b6fb0f4831e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.944752] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "45c9678b-0478-4192-8684-3b6fb0f4831e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.944975] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "45c9678b-0478-4192-8684-3b6fb0f4831e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.951030] env[65758]: INFO nova.compute.manager [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 
tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Terminating instance [ 1261.994122] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99af11ed-98d5-4b25-b387-96119e8340f3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.022066] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1262.022442] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d9febd3-bc44-411f-8a04-38ec07cb6d28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.030961] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1262.030961] env[65758]: value = "task-4661516" [ 1262.030961] env[65758]: _type = "Task" [ 1262.030961] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.041248] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661516, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.330507] env[65758]: DEBUG oslo_concurrency.lockutils [req-15410350-c4ec-4653-844c-567dd0c944c1 req-0f2f043a-b6d1-4c2a-a9a4-a2ea7fd8b649 service nova] Releasing lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.330980] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1262.331225] env[65758]: DEBUG nova.network.neutron [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1262.373203] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.375970] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 
tempest-ServerShowV257Test-620117805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.702s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.376171] env[65758]: DEBUG nova.objects.instance [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1262.398014] env[65758]: INFO nova.scheduler.client.report [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted allocations for instance c97f02fc-a244-40e9-97b3-8cbbf516607a [ 1262.455057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "refresh_cache-45c9678b-0478-4192-8684-3b6fb0f4831e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.455057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquired lock "refresh_cache-45c9678b-0478-4192-8684-3b6fb0f4831e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1262.455313] env[65758]: DEBUG nova.network.neutron [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1262.541643] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661516, 'name': PowerOffVM_Task, 'duration_secs': 0.495464} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.542522] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1262.550896] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1262.551324] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fd5b929-1b8d-4cb6-8515-0bdfb25703c5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.574494] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1262.574494] env[65758]: value = "task-4661517" [ 1262.574494] env[65758]: _type = "Task" [ 1262.574494] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.583875] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661517, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.834851] env[65758]: WARNING openstack [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1262.835287] env[65758]: WARNING openstack [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1262.875992] env[65758]: DEBUG nova.network.neutron [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1262.907135] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1923f337-6c8c-4529-9fbe-a082e00b9e75 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "c97f02fc-a244-40e9-97b3-8cbbf516607a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.688s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.957745] env[65758]: WARNING neutronclient.v2_0.client [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1262.958542] env[65758]: WARNING openstack [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1262.958896] env[65758]: WARNING openstack [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1262.977397] env[65758]: WARNING neutronclient.v2_0.client [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1262.978038] env[65758]: WARNING openstack [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1262.978403] env[65758]: WARNING openstack [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1262.986608] env[65758]: DEBUG nova.network.neutron [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1263.039033] env[65758]: DEBUG nova.network.neutron [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1263.070310] env[65758]: DEBUG nova.network.neutron [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Updating instance_info_cache with network_info: [{"id": "72e1a94b-418a-4f03-a5c3-8876b1d7f3d3", "address": "fa:16:3e:65:d3:f1", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72e1a94b-41", "ovs_interfaceid": "72e1a94b-418a-4f03-a5c3-8876b1d7f3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1263.089823] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661517, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.384231] env[65758]: DEBUG oslo_concurrency.lockutils [None req-5d2097a9-52f0-4c67-9794-cf6e6110d996 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.473215] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.541776] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Releasing lock "refresh_cache-45c9678b-0478-4192-8684-3b6fb0f4831e" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.542400] env[65758]: DEBUG nova.compute.manager [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1263.542602] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1263.543519] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a09d061-0650-425a-b25c-06687246be96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.552424] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1263.552768] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cfba493-70f3-44cc-a8a0-eef8914e1544 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.561907] env[65758]: DEBUG oslo_vmware.api [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1263.561907] env[65758]: value = "task-4661518" [ 1263.561907] env[65758]: _type = "Task" [ 1263.561907] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.572037] env[65758]: DEBUG oslo_vmware.api [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661518, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.572610] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.572957] env[65758]: DEBUG nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Instance network_info: |[{"id": "72e1a94b-418a-4f03-a5c3-8876b1d7f3d3", "address": "fa:16:3e:65:d3:f1", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72e1a94b-41", "ovs_interfaceid": "72e1a94b-418a-4f03-a5c3-8876b1d7f3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1263.573453] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:d3:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc16c915-cff1-4faa-a529-9773ee9bab7e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72e1a94b-418a-4f03-a5c3-8876b1d7f3d3', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1263.581404] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1263.581503] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1263.584776] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d58b4eb-2df0-4354-9ac3-0cab58e8692f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.606704] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661517, 'name': ReconfigVM_Task, 'duration_secs': 0.525932} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.608456] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1263.608632] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1263.608890] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1263.608890] env[65758]: value = "task-4661519" [ 1263.608890] env[65758]: _type = "Task" [ 1263.608890] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.609148] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfb8d851-1446-4805-8d29-a16aed33a690 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.621064] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661519, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.622767] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1263.622767] env[65758]: value = "task-4661520" [ 1263.622767] env[65758]: _type = "Task" [ 1263.622767] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.633881] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661520, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.969878] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.970397] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.970912] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.971169] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.971423] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.974360] env[65758]: INFO nova.compute.manager [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Terminating instance [ 1264.071880] env[65758]: DEBUG oslo_vmware.api [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661518, 'name': PowerOffVM_Task, 'duration_secs': 0.137829} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.072155] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1264.072307] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1264.072571] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef1d778d-9ceb-4181-9362-6c7ff22fb79a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.125674] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661519, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.132333] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1264.132688] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1264.132972] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Deleting the datastore file [datastore2] 45c9678b-0478-4192-8684-3b6fb0f4831e {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1264.133973] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a942aca-85b1-481d-bebb-b24f723556ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.141851] env[65758]: DEBUG oslo_vmware.api [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661520, 'name': PowerOnVM_Task, 'duration_secs': 0.482274} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.142905] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1264.143271] env[65758]: DEBUG nova.compute.manager [None req-26a5a00b-18c3-411a-b69b-c10a9091d670 tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1264.144528] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54df35ba-88d6-4592-a558-4c441c426089 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.150546] env[65758]: DEBUG oslo_vmware.api [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for the task: (returnval){ [ 1264.150546] env[65758]: value = "task-4661522" [ 1264.150546] env[65758]: _type = "Task" [ 1264.150546] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.170537] env[65758]: DEBUG oslo_vmware.api [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661522, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.484973] env[65758]: DEBUG nova.compute.manager [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1264.485376] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1264.487166] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f83460-22fd-4316-903b-c7aeaa49e870 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.500981] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.501436] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f30cfdd1-e5fe-4994-b445-71ff18ddf9fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.511175] env[65758]: DEBUG oslo_vmware.api [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1264.511175] env[65758]: value = "task-4661523" [ 1264.511175] env[65758]: _type = "Task" [ 1264.511175] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.524148] env[65758]: DEBUG oslo_vmware.api [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.624132] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661519, 'name': CreateVM_Task, 'duration_secs': 0.572991} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.624354] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1264.624967] env[65758]: WARNING neutronclient.v2_0.client [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1264.625479] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.625647] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.626014] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1264.626288] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3369a2fa-e7c7-40ee-97a9-00f7dd28ca87 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.632093] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1264.632093] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520b5755-1e08-12ba-2ef4-8f6965e57ea5" [ 1264.632093] env[65758]: _type = "Task" [ 1264.632093] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.641922] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520b5755-1e08-12ba-2ef4-8f6965e57ea5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.662137] env[65758]: DEBUG oslo_vmware.api [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Task: {'id': task-4661522, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.387665} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.664947] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1264.665120] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1264.665393] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1264.666037] env[65758]: INFO nova.compute.manager [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1264.666037] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1264.667490] env[65758]: DEBUG nova.compute.manager [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1264.667490] env[65758]: DEBUG nova.network.neutron [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1264.667636] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1264.668546] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1264.668546] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1264.696354] env[65758]: DEBUG nova.network.neutron [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Instance cache missing network info. 
{{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1264.696768] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1264.912029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "2bfca515-f4cb-4781-8423-aebf9477a69b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.912029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "2bfca515-f4cb-4781-8423-aebf9477a69b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.912029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "2bfca515-f4cb-4781-8423-aebf9477a69b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.912029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "2bfca515-f4cb-4781-8423-aebf9477a69b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.912029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "2bfca515-f4cb-4781-8423-aebf9477a69b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.914403] env[65758]: INFO nova.compute.manager [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Terminating instance [ 1265.022214] env[65758]: DEBUG oslo_vmware.api [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661523, 'name': PowerOffVM_Task, 'duration_secs': 0.401862} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.022537] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1265.022713] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1265.022975] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-270fb9e9-2d50-432d-931e-23d8f47a8817 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.090097] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1265.090443] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1265.090667] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleting the datastore file [datastore1] cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1265.090956] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87baeb21-1b9e-48ae-9f86-7b32582e87d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.097682] env[65758]: DEBUG oslo_vmware.api [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1265.097682] env[65758]: value = "task-4661525" [ 1265.097682] env[65758]: _type = "Task" [ 1265.097682] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.106991] env[65758]: DEBUG oslo_vmware.api [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661525, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.143434] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520b5755-1e08-12ba-2ef4-8f6965e57ea5, 'name': SearchDatastore_Task, 'duration_secs': 0.018529} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.143764] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.144037] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1265.144295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.144478] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1265.144698] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1265.145075] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-356e759a-31ac-4b72-83fb-dcd109ba0a75 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.153938] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1265.154133] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1265.154893] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd7e90a1-5278-4226-8fe1-5e5465a0748e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.160567] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1265.160567] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d120d8-a85f-6af8-7a7e-03ef3698cc54" [ 1265.160567] env[65758]: _type = "Task" [ 1265.160567] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.169692] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d120d8-a85f-6af8-7a7e-03ef3698cc54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.199168] env[65758]: DEBUG nova.network.neutron [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1265.418353] env[65758]: DEBUG nova.compute.manager [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1265.418585] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1265.419497] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049b5e99-a012-4dc0-93ac-3791030fec42 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.427965] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1265.428250] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66352d07-1795-4d97-90fa-4325ae1e184a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.435487] env[65758]: DEBUG oslo_vmware.api [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1265.435487] env[65758]: value = "task-4661526" [ 1265.435487] env[65758]: _type = "Task" [ 1265.435487] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.445589] env[65758]: DEBUG oslo_vmware.api [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661526, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.608725] env[65758]: DEBUG oslo_vmware.api [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126103} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.609241] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.609241] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1265.609368] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1265.609525] env[65758]: INFO nova.compute.manager [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1265.609781] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1265.609983] env[65758]: DEBUG nova.compute.manager [-] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1265.610093] env[65758]: DEBUG nova.network.neutron [-] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1265.610345] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1265.610909] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1265.611196] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1265.649781] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1265.672256] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d120d8-a85f-6af8-7a7e-03ef3698cc54, 'name': SearchDatastore_Task, 'duration_secs': 0.008939} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.673067] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b0497c7-95df-4313-9515-1e212827187f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.679593] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1265.679593] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527f05f5-08cf-4442-c523-b3847fbd3928" [ 1265.679593] env[65758]: _type = "Task" [ 1265.679593] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.690069] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527f05f5-08cf-4442-c523-b3847fbd3928, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.701792] env[65758]: INFO nova.compute.manager [-] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Took 1.03 seconds to deallocate network for instance. 
[ 1265.859624] env[65758]: DEBUG nova.compute.manager [req-807853da-0805-449d-94d2-4a0d53a00e98 req-0a889828-963d-4669-9039-b5ee990e5f56 service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Received event network-vif-deleted-c0fe8827-b903-4031-a8be-c5b8a66577af {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1265.859624] env[65758]: INFO nova.compute.manager [req-807853da-0805-449d-94d2-4a0d53a00e98 req-0a889828-963d-4669-9039-b5ee990e5f56 service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Neutron deleted interface c0fe8827-b903-4031-a8be-c5b8a66577af; detaching it from the instance and deleting it from the info cache [ 1265.859809] env[65758]: DEBUG nova.network.neutron [req-807853da-0805-449d-94d2-4a0d53a00e98 req-0a889828-963d-4669-9039-b5ee990e5f56 service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1265.946897] env[65758]: DEBUG oslo_vmware.api [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661526, 'name': PowerOffVM_Task, 'duration_secs': 0.203073} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.947220] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1265.947440] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1265.947753] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b02b3960-c995-40e2-a915-662191adf4cf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.013978] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1266.014239] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1266.014424] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Deleting the datastore file [datastore2] 2bfca515-f4cb-4781-8423-aebf9477a69b {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1266.014717] env[65758]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d9068ea-734f-41f4-b9de-36e845903a03 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.021515] env[65758]: DEBUG oslo_vmware.api [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1266.021515] env[65758]: value = "task-4661528" [ 1266.021515] env[65758]: _type = "Task" [ 1266.021515] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.030607] env[65758]: DEBUG oslo_vmware.api [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661528, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.191029] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527f05f5-08cf-4442-c523-b3847fbd3928, 'name': SearchDatastore_Task, 'duration_secs': 0.012147} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.191285] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1266.191541] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] e505f8e8-0612-4fe7-bcd2-73fdd39458fa/e505f8e8-0612-4fe7-bcd2-73fdd39458fa.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1266.191807] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d1cb894-e38b-47f7-923c-70ea5f5cc5b4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.199136] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1266.199136] env[65758]: value = "task-4661529" [ 1266.199136] env[65758]: _type = "Task" [ 1266.199136] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.207895] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.208193] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.208434] env[65758]: DEBUG nova.objects.instance [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lazy-loading 'resources' on Instance uuid 45c9678b-0478-4192-8684-3b6fb0f4831e {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1266.209443] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661529, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.340069] env[65758]: DEBUG nova.network.neutron [-] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1266.362419] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-42d5f179-cafa-4015-899e-d1fd58bba928 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.373391] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7091085b-f61d-4e29-b4a7-95c3143cdcc0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.404115] env[65758]: DEBUG nova.compute.manager [req-807853da-0805-449d-94d2-4a0d53a00e98 req-0a889828-963d-4669-9039-b5ee990e5f56 service nova] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Detach interface failed, port_id=c0fe8827-b903-4031-a8be-c5b8a66577af, reason: Instance cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1266.534472] env[65758]: DEBUG oslo_vmware.api [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14581} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.534741] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1266.534926] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1266.535116] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1266.535322] env[65758]: INFO nova.compute.manager [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1266.535656] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1266.535865] env[65758]: DEBUG nova.compute.manager [-] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1266.535960] env[65758]: DEBUG nova.network.neutron [-] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1266.536224] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1266.536859] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1266.537132] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1266.578122] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1266.713171] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661529, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.842607] env[65758]: INFO nova.compute.manager [-] [instance: cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Took 1.23 seconds to deallocate network for instance. [ 1266.855546] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb43d3e8-7136-435a-801e-4a915fa9e476 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.864868] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e78361-ef7e-455d-bc46-973ff059fc2e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.898032] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4af0576-42a4-4cb6-a76e-f31cb16591d3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.908193] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed18a172-adcc-44d9-9e6d-e976ab374733 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.924716] env[65758]: DEBUG nova.compute.provider_tree [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1267.210579] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661529, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.351143] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.368783] env[65758]: DEBUG nova.network.neutron [-] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1267.459438] env[65758]: DEBUG nova.scheduler.client.report [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Updated inventory for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with generation 171 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1267.459777] env[65758]: DEBUG nova.compute.provider_tree [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Updating resource provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 generation from 171 to 172 during operation: update_inventory {{(pid=65758) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1267.460047] env[65758]: DEBUG nova.compute.provider_tree [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Updating inventory in ProviderTree for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1267.714075] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661529, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.508451} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.714514] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore1] e505f8e8-0612-4fe7-bcd2-73fdd39458fa/e505f8e8-0612-4fe7-bcd2-73fdd39458fa.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1267.714714] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1267.715026] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd2e0064-66e3-4842-af7c-a1e5d7331e65 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.723723] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1267.723723] env[65758]: value = "task-4661530" [ 1267.723723] env[65758]: _type = "Task" [ 1267.723723] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.735810] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661530, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.871736] env[65758]: INFO nova.compute.manager [-] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Took 1.34 seconds to deallocate network for instance. 
[ 1267.885565] env[65758]: DEBUG nova.compute.manager [req-e6c84174-15b6-47ea-82aa-3aab71223d0c req-0d829525-2b09-4e11-92b6-132871334da5 service nova] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Received event network-vif-deleted-f268cd74-fb17-4936-92b2-939e07f2fdfa {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1267.965889] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.757s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.968699] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.618s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.969115] env[65758]: DEBUG nova.objects.instance [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lazy-loading 'resources' on Instance uuid cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1267.988053] env[65758]: INFO nova.scheduler.client.report [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Deleted allocations for instance 45c9678b-0478-4192-8684-3b6fb0f4831e [ 1268.234013] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661530, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066702} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.234229] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1268.234984] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d700270-fa9a-4629-a88c-e96827197120 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.257892] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] e505f8e8-0612-4fe7-bcd2-73fdd39458fa/e505f8e8-0612-4fe7-bcd2-73fdd39458fa.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1268.258217] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4956f845-ea8d-4ab3-8ffb-f40b978b402f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.278643] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1268.278643] env[65758]: value = "task-4661531" [ 1268.278643] env[65758]: _type = "Task" [ 1268.278643] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.287490] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661531, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.378334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1268.496619] env[65758]: DEBUG oslo_concurrency.lockutils [None req-3ecc79ad-921b-47b9-a53a-007e5fcac5b2 tempest-ServerShowV257Test-620117805 tempest-ServerShowV257Test-620117805-project-member] Lock "45c9678b-0478-4192-8684-3b6fb0f4831e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.552s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.592835] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1262885-65aa-48af-9086-0d2406300d84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.601441] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86551de3-43e7-48af-9d9a-7ddc4dc842f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.633568] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa03ed20-e1d3-476a-ab7b-400019cc43d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.641820] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee6c1cf-3011-46ee-a794-96f09b618c72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.656071] env[65758]: DEBUG nova.compute.provider_tree [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.788902] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661531, 'name': ReconfigVM_Task, 'duration_secs': 0.263315} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.789330] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Reconfigured VM instance instance-00000079 to attach disk [datastore1] e505f8e8-0612-4fe7-bcd2-73fdd39458fa/e505f8e8-0612-4fe7-bcd2-73fdd39458fa.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1268.789942] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f083582-8ed2-4cb9-9c90-3e1c9a624f3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.796721] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1268.796721] env[65758]: value = "task-4661532" [ 1268.796721] env[65758]: _type = "Task" [ 1268.796721] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.805107] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661532, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.159975] env[65758]: DEBUG nova.scheduler.client.report [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1269.308085] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661532, 'name': Rename_Task, 'duration_secs': 0.1391} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.308365] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1269.308672] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-766bc8ad-3f64-4f52-83cd-41f5c0933226 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.316436] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1269.316436] env[65758]: value = "task-4661533" [ 1269.316436] env[65758]: _type = "Task" [ 1269.316436] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.325492] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661533, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.664838] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.696s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.668035] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.289s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.668035] env[65758]: DEBUG nova.objects.instance [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lazy-loading 'resources' on Instance uuid 2bfca515-f4cb-4781-8423-aebf9477a69b {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.689799] env[65758]: INFO nova.scheduler.client.report [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted allocations for instance cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42 [ 1269.827484] env[65758]: DEBUG oslo_vmware.api [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661533, 'name': PowerOnVM_Task, 'duration_secs': 0.449237} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.827851] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1269.827948] env[65758]: INFO nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Took 9.65 seconds to spawn the instance on the hypervisor. [ 1269.828136] env[65758]: DEBUG nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1269.828959] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd985b9-575a-4a11-9f61-f7b29b38f86e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.197683] env[65758]: DEBUG oslo_concurrency.lockutils [None req-a90196bb-8ac1-4cbb-95cf-8d19dd2e8f21 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.227s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.269237] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdbf7fa-5d53-4fd2-af46-27601e148e24 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.277556] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b4c33c-6ca2-4e3c-8ae9-4039a5a13f9e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.310691] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58f94e4-bdc5-461a-a61d-e7a35a774f86 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.320229] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2db3bd1-75ed-45ea-8fc6-56486e2884c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.334755] env[65758]: DEBUG nova.compute.provider_tree [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.345404] env[65758]: INFO nova.compute.manager [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 
e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Took 15.79 seconds to build instance. [ 1270.839338] env[65758]: DEBUG nova.scheduler.client.report [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1270.849033] env[65758]: DEBUG oslo_concurrency.lockutils [None req-e5c58654-fc36-49d2-89a6-e0c529d16d9e tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.305s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.061367] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "8b2cfaf8-dd34-4262-a867-613502a964ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.061593] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "8b2cfaf8-dd34-4262-a867-613502a964ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.190311] env[65758]: DEBUG nova.compute.manager [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Received event network-changed-72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1271.191029] env[65758]: DEBUG nova.compute.manager [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Refreshing instance network info cache due to event network-changed-72e1a94b-418a-4f03-a5c3-8876b1d7f3d3. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1271.191263] env[65758]: DEBUG oslo_concurrency.lockutils [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] Acquiring lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.191539] env[65758]: DEBUG oslo_concurrency.lockutils [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] Acquired lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.191773] env[65758]: DEBUG nova.network.neutron [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Refreshing network info cache for port 72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1271.343892] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.366541] env[65758]: INFO nova.scheduler.client.report [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Deleted allocations for instance 2bfca515-f4cb-4781-8423-aebf9477a69b [ 1271.563658] env[65758]: DEBUG nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1271.694434] env[65758]: WARNING neutronclient.v2_0.client [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1271.695255] env[65758]: WARNING openstack [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1271.695675] env[65758]: WARNING openstack [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1271.863960] env[65758]: WARNING neutronclient.v2_0.client [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1271.864766] env[65758]: WARNING openstack [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1271.865200] env[65758]: WARNING openstack [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1271.875193] env[65758]: DEBUG oslo_concurrency.lockutils [None req-77337dec-08e3-41d2-b8a6-eec3bb7ca88c tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "2bfca515-f4cb-4781-8423-aebf9477a69b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.964s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.967789] env[65758]: DEBUG nova.network.neutron [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Updated VIF entry in instance network info cache for port 72e1a94b-418a-4f03-a5c3-8876b1d7f3d3. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1271.968250] env[65758]: DEBUG nova.network.neutron [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Updating instance_info_cache with network_info: [{"id": "72e1a94b-418a-4f03-a5c3-8876b1d7f3d3", "address": "fa:16:3e:65:d3:f1", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72e1a94b-41", "ovs_interfaceid": "72e1a94b-418a-4f03-a5c3-8876b1d7f3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1272.087240] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.087583] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.089154] env[65758]: INFO nova.compute.claims [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.471770] env[65758]: DEBUG oslo_concurrency.lockutils [req-ae7330ab-62b2-46de-a61f-1a6a07f2e85c req-d6386738-60e1-4750-ad37-5b545dea5277 service nova] Releasing lock "refresh_cache-e505f8e8-0612-4fe7-bcd2-73fdd39458fa" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.645314] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.645766] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.646047] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.646282] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.646520] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.648782] env[65758]: INFO nova.compute.manager [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Terminating instance [ 1273.153204] env[65758]: DEBUG nova.compute.manager [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1273.153564] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1273.154436] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49612714-b019-4286-89d2-7297a3771669 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.163573] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1273.163912] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76a3f673-27d9-4ea8-acb6-2e9091045bfc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.171993] env[65758]: DEBUG oslo_vmware.api [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1273.171993] env[65758]: value = "task-4661534" [ 1273.171993] env[65758]: _type = "Task" [ 1273.171993] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.181515] env[65758]: DEBUG oslo_vmware.api [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661534, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.297646] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487adeed-eafe-4303-a9cc-802c33441bef {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.306099] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51839082-9d41-4de7-866a-89ea49811521 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.339158] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ce507e-36d4-4c95-b7f6-61af92bfbd1d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.348647] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f50679f-79a6-46d8-96e3-4cd7d0a52a55 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.364314] env[65758]: DEBUG nova.compute.provider_tree [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.682235] env[65758]: DEBUG oslo_vmware.api [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661534, 'name': PowerOffVM_Task, 'duration_secs': 0.237936} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.682640] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1273.682885] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1273.683291] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4af30a83-23ff-467c-bf1a-ad6bbe8363d1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.749895] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1273.750149] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1273.750330] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Deleting the datastore file [datastore1] 0f3ae822-4c4c-4dff-94d4-3416187d6d25 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1273.750615] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2904f292-e222-4b21-bd80-9243f0e7c645 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.757843] env[65758]: DEBUG oslo_vmware.api [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for the task: (returnval){ [ 1273.757843] env[65758]: value = "task-4661536" [ 1273.757843] env[65758]: _type = "Task" [ 1273.757843] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.766249] env[65758]: DEBUG oslo_vmware.api [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661536, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.867685] env[65758]: DEBUG nova.scheduler.client.report [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1274.269122] env[65758]: DEBUG oslo_vmware.api [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Task: {'id': task-4661536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.394179} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.269122] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1274.269122] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1274.269122] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1274.269706] env[65758]: INFO nova.compute.manager [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1274.269706] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1274.269706] env[65758]: DEBUG nova.compute.manager [-] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1274.269817] env[65758]: DEBUG nova.network.neutron [-] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1274.270049] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1274.270599] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1274.270854] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1274.307020] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1274.372972] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.285s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.373629] env[65758]: DEBUG nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1274.531584] env[65758]: DEBUG nova.compute.manager [req-090d2b3a-0c96-420e-924e-974bfe20a9bf req-fdac72bd-5bc1-4d20-a795-9d867e377899 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Received event network-vif-deleted-b15c4724-d64a-4321-8c27-5e337f8b9312 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1274.531780] env[65758]: INFO nova.compute.manager [req-090d2b3a-0c96-420e-924e-974bfe20a9bf req-fdac72bd-5bc1-4d20-a795-9d867e377899 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Neutron deleted interface b15c4724-d64a-4321-8c27-5e337f8b9312; detaching it from the instance and deleting it from the info cache [ 1274.531957] env[65758]: DEBUG nova.network.neutron [req-090d2b3a-0c96-420e-924e-974bfe20a9bf req-fdac72bd-5bc1-4d20-a795-9d867e377899 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1274.878696] env[65758]: DEBUG nova.compute.utils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1274.881078] env[65758]: DEBUG nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1274.881274] env[65758]: DEBUG nova.network.neutron [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1274.881608] env[65758]: WARNING neutronclient.v2_0.client [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1274.881906] env[65758]: WARNING neutronclient.v2_0.client [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1274.882513] env[65758]: WARNING openstack [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1274.882853] env[65758]: WARNING openstack [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1274.947183] env[65758]: DEBUG nova.policy [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc85d2d1d84f4df0b4de5e6388bb9398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82bfbb5ee6714c9aa5119cb714d28ce2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1275.011962] env[65758]: DEBUG nova.network.neutron [-] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1275.034299] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7754a7d-3fba-40d4-bba8-0f4cc0cb9c0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.046685] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a598dea-e269-4776-beb9-bba0473c8445 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.077706] env[65758]: DEBUG nova.compute.manager [req-090d2b3a-0c96-420e-924e-974bfe20a9bf req-fdac72bd-5bc1-4d20-a795-9d867e377899 service nova] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Detach interface failed, port_id=b15c4724-d64a-4321-8c27-5e337f8b9312, reason: Instance 0f3ae822-4c4c-4dff-94d4-3416187d6d25 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1275.234302] env[65758]: DEBUG nova.network.neutron [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Successfully created port: 3bf0a249-5403-4cc2-a15e-f66b0091f23a {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1275.382130] env[65758]: DEBUG nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Start building block device mappings for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1275.515250] env[65758]: INFO nova.compute.manager [-] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Took 1.25 seconds to deallocate network for instance. [ 1276.022211] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.022522] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.022747] env[65758]: DEBUG nova.objects.instance [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lazy-loading 'resources' on Instance uuid 0f3ae822-4c4c-4dff-94d4-3416187d6d25 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1276.391581] env[65758]: DEBUG nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1276.426990] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1276.427268] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1276.427438] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1276.427627] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 
tempest-ServersTestJSON-887760377-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1276.427767] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1276.427907] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1276.428359] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1276.428522] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1276.428689] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1276.428857] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1276.429021] env[65758]: DEBUG nova.virt.hardware [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1276.430095] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f677d2-ee75-430e-9b0e-604991c91e5e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.439971] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a22af8-86c6-414c-ad18-40a09c1bbe56 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.631070] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e55b84-e993-4a35-85e2-b28b0f95ea47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.641444] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b1960c-6b75-492d-8662-59fc12372fdb {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.676032] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684a1a5e-a968-4d76-97c9-cc59526e1944 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.679383] env[65758]: DEBUG nova.compute.manager [req-751562e9-915b-48f7-81a5-145f99ceab67 req-c4021772-8de0-4805-8e23-70db3ed69769 service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Received event network-vif-plugged-3bf0a249-5403-4cc2-a15e-f66b0091f23a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1276.679596] env[65758]: DEBUG oslo_concurrency.lockutils [req-751562e9-915b-48f7-81a5-145f99ceab67 req-c4021772-8de0-4805-8e23-70db3ed69769 service nova] Acquiring lock "8b2cfaf8-dd34-4262-a867-613502a964ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.679791] env[65758]: DEBUG oslo_concurrency.lockutils [req-751562e9-915b-48f7-81a5-145f99ceab67 req-c4021772-8de0-4805-8e23-70db3ed69769 service nova] Lock "8b2cfaf8-dd34-4262-a867-613502a964ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.679947] env[65758]: DEBUG oslo_concurrency.lockutils [req-751562e9-915b-48f7-81a5-145f99ceab67 req-c4021772-8de0-4805-8e23-70db3ed69769 service nova] Lock "8b2cfaf8-dd34-4262-a867-613502a964ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.680149] env[65758]: DEBUG nova.compute.manager [req-751562e9-915b-48f7-81a5-145f99ceab67 req-c4021772-8de0-4805-8e23-70db3ed69769 service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] No waiting events found dispatching network-vif-plugged-3bf0a249-5403-4cc2-a15e-f66b0091f23a {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1276.680365] env[65758]: WARNING nova.compute.manager [req-751562e9-915b-48f7-81a5-145f99ceab67 req-c4021772-8de0-4805-8e23-70db3ed69769 service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Received unexpected event network-vif-plugged-3bf0a249-5403-4cc2-a15e-f66b0091f23a for instance with vm_state building and task_state spawning. 
[ 1276.686876] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc65fac-c7ce-46c9-a0fd-3d0ea1f1e1ec {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.702289] env[65758]: DEBUG nova.compute.provider_tree [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.782344] env[65758]: DEBUG nova.network.neutron [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Successfully updated port: 3bf0a249-5403-4cc2-a15e-f66b0091f23a {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1277.205968] env[65758]: DEBUG nova.scheduler.client.report [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1277.285956] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "refresh_cache-8b2cfaf8-dd34-4262-a867-613502a964ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.286066] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "refresh_cache-8b2cfaf8-dd34-4262-a867-613502a964ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1277.286318] env[65758]: DEBUG nova.network.neutron [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1277.711049] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.688s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.736206] env[65758]: INFO nova.scheduler.client.report [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Deleted allocations for instance 0f3ae822-4c4c-4dff-94d4-3416187d6d25 [ 1277.789929] 
env[65758]: WARNING openstack [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1277.790371] env[65758]: WARNING openstack [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1277.830257] env[65758]: DEBUG nova.network.neutron [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1277.911339] env[65758]: WARNING neutronclient.v2_0.client [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1277.911998] env[65758]: WARNING openstack [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1277.912394] env[65758]: WARNING openstack [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1278.005618] env[65758]: DEBUG nova.network.neutron [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Updating instance_info_cache with network_info: [{"id": "3bf0a249-5403-4cc2-a15e-f66b0091f23a", "address": "fa:16:3e:c7:73:1b", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf0a249-54", "ovs_interfaceid": "3bf0a249-5403-4cc2-a15e-f66b0091f23a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1278.244247] env[65758]: DEBUG oslo_concurrency.lockutils [None req-180429fc-a17f-475b-aa78-8404b0b8d5ba tempest-ServerRescueTestJSON-7402537 tempest-ServerRescueTestJSON-7402537-project-member] Lock "0f3ae822-4c4c-4dff-94d4-3416187d6d25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.598s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.509061] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "refresh_cache-8b2cfaf8-dd34-4262-a867-613502a964ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1278.509393] env[65758]: DEBUG nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Instance network_info: |[{"id": "3bf0a249-5403-4cc2-a15e-f66b0091f23a", "address": "fa:16:3e:c7:73:1b", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf0a249-54", "ovs_interfaceid": "3bf0a249-5403-4cc2-a15e-f66b0091f23a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1278.509869] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:73:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3bf0a249-5403-4cc2-a15e-f66b0091f23a', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1278.517772] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None 
req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1278.518049] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1278.518332] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c5ffebc-3cff-4c5e-9bd8-1cbc5e33b4d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.541381] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1278.541381] env[65758]: value = "task-4661537" [ 1278.541381] env[65758]: _type = "Task" [ 1278.541381] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.550276] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661537, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.692949] env[65758]: DEBUG nova.compute.manager [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Received event network-changed-3bf0a249-5403-4cc2-a15e-f66b0091f23a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1278.693264] env[65758]: DEBUG nova.compute.manager [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Refreshing instance network info cache due to event network-changed-3bf0a249-5403-4cc2-a15e-f66b0091f23a. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1278.693586] env[65758]: DEBUG oslo_concurrency.lockutils [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] Acquiring lock "refresh_cache-8b2cfaf8-dd34-4262-a867-613502a964ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.693804] env[65758]: DEBUG oslo_concurrency.lockutils [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] Acquired lock "refresh_cache-8b2cfaf8-dd34-4262-a867-613502a964ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1278.694122] env[65758]: DEBUG nova.network.neutron [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Refreshing network info cache for port 3bf0a249-5403-4cc2-a15e-f66b0091f23a {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1279.052186] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661537, 'name': CreateVM_Task, 'duration_secs': 0.426633} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.052539] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1279.052799] env[65758]: WARNING neutronclient.v2_0.client [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1279.053210] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.053364] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.053704] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1279.053969] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fdfe44c-cfe1-4d0e-98f9-04f3d3838348 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.059638] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1279.059638] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52552fac-7d91-f46b-4019-f6679481afcc" [ 1279.059638] env[65758]: _type = "Task" [ 1279.059638] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.069735] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52552fac-7d91-f46b-4019-f6679481afcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.196867] env[65758]: WARNING neutronclient.v2_0.client [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1279.197588] env[65758]: WARNING openstack [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1279.197968] env[65758]: WARNING openstack [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1279.363415] env[65758]: WARNING neutronclient.v2_0.client [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1279.364082] env[65758]: WARNING openstack [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1279.364462] env[65758]: WARNING openstack [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1279.454494] env[65758]: DEBUG nova.network.neutron [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Updated VIF entry in instance network info cache for port 3bf0a249-5403-4cc2-a15e-f66b0091f23a. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1279.455185] env[65758]: DEBUG nova.network.neutron [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Updating instance_info_cache with network_info: [{"id": "3bf0a249-5403-4cc2-a15e-f66b0091f23a", "address": "fa:16:3e:c7:73:1b", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf0a249-54", "ovs_interfaceid": "3bf0a249-5403-4cc2-a15e-f66b0091f23a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1279.571236] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52552fac-7d91-f46b-4019-f6679481afcc, 'name': SearchDatastore_Task, 'duration_secs': 0.011308} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.571500] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.571733] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1279.572067] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.572214] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.572397] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1279.572688] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddfeb509-22a5-4e57-a6e1-fe668ac8be7e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.593188] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1279.593458] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1279.594274] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cec4830-2076-4646-8e46-c0d3907ad622 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.600779] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1279.600779] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]520a3de4-f84b-ee95-88ce-bb98baa59c34" [ 1279.600779] env[65758]: _type = "Task" [ 1279.600779] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.609943] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520a3de4-f84b-ee95-88ce-bb98baa59c34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.958360] env[65758]: DEBUG oslo_concurrency.lockutils [req-8881f0ef-65a1-48d4-92d0-02ac38ddf60f req-d876746d-07df-439f-aba8-de4a78a137d5 service nova] Releasing lock "refresh_cache-8b2cfaf8-dd34-4262-a867-613502a964ca" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.112630] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]520a3de4-f84b-ee95-88ce-bb98baa59c34, 'name': SearchDatastore_Task, 'duration_secs': 0.010944} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.113710] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84039c20-4bc9-43c5-9b03-cfc85f3d819d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.121726] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1280.121726] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ab3357-9757-39c5-1092-b3b097f585af" [ 1280.121726] env[65758]: _type = "Task" [ 1280.121726] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.131466] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ab3357-9757-39c5-1092-b3b097f585af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.633300] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ab3357-9757-39c5-1092-b3b097f585af, 'name': SearchDatastore_Task, 'duration_secs': 0.012006} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.633523] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.633803] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 8b2cfaf8-dd34-4262-a867-613502a964ca/8b2cfaf8-dd34-4262-a867-613502a964ca.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1280.634119] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b237dc68-d620-4c10-9ea0-870c48a36cbc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.642495] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1280.642495] env[65758]: value = "task-4661538" [ 1280.642495] env[65758]: _type = "Task" [ 1280.642495] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.651369] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661538, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.153436] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661538, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481191} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.153816] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 8b2cfaf8-dd34-4262-a867-613502a964ca/8b2cfaf8-dd34-4262-a867-613502a964ca.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1281.153971] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1281.154217] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b7f7875-b46e-403c-9a3a-ae8763faad70 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.163434] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1281.163434] env[65758]: value = "task-4661539" [ 1281.163434] env[65758]: _type = "Task" [ 1281.163434] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.173871] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661539, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.673491] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073978} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.673491] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1281.674189] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f107df-dc08-4f11-bad4-4ee45c761207 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.697217] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 8b2cfaf8-dd34-4262-a867-613502a964ca/8b2cfaf8-dd34-4262-a867-613502a964ca.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1281.697584] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05d015f3-ba72-4fb5-a13b-ffde05a53272 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.719359] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1281.719359] env[65758]: value = "task-4661540" [ 1281.719359] env[65758]: _type = "Task" [ 1281.719359] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.729093] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661540, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.230392] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661540, 'name': ReconfigVM_Task, 'duration_secs': 0.297295} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.230776] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 8b2cfaf8-dd34-4262-a867-613502a964ca/8b2cfaf8-dd34-4262-a867-613502a964ca.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1282.231352] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cda20053-70e5-471b-9e91-33c62b305fbe {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.239665] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1282.239665] env[65758]: value = "task-4661541" [ 1282.239665] env[65758]: _type = "Task" [ 1282.239665] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.248726] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661541, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.750243] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661541, 'name': Rename_Task, 'duration_secs': 0.157804} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.750562] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1282.750833] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ba1bcc3-69a6-4406-9c18-4c8aff4d109a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.757542] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1282.757542] env[65758]: value = "task-4661542" [ 1282.757542] env[65758]: _type = "Task" [ 1282.757542] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.765732] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661542, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.268177] env[65758]: DEBUG oslo_vmware.api [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661542, 'name': PowerOnVM_Task, 'duration_secs': 0.440266} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.268557] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1283.268685] env[65758]: INFO nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Took 6.88 seconds to spawn the instance on the hypervisor. [ 1283.268824] env[65758]: DEBUG nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1283.269618] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345547c2-da5e-4fc1-a9b4-45981b335189 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.786185] env[65758]: INFO nova.compute.manager [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Took 11.72 seconds to build instance. 
[ 1284.288164] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1e984ad0-e969-4e0b-aa64-c982b198fee2 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "8b2cfaf8-dd34-4262-a867-613502a964ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.226s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.472442] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "8b2cfaf8-dd34-4262-a867-613502a964ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.472637] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "8b2cfaf8-dd34-4262-a867-613502a964ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.472848] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "8b2cfaf8-dd34-4262-a867-613502a964ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.473041] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "8b2cfaf8-dd34-4262-a867-613502a964ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.473215] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "8b2cfaf8-dd34-4262-a867-613502a964ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.475398] env[65758]: INFO nova.compute.manager [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Terminating instance [ 1284.980373] env[65758]: DEBUG nova.compute.manager [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1284.980672] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1284.981802] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f48a989-ed0e-4845-aec2-53722ad9bbfa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.991503] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1284.991872] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d97271c-2f5d-440f-a869-cddc6b200ad0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.000130] env[65758]: DEBUG oslo_vmware.api [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1285.000130] env[65758]: value = "task-4661543" [ 1285.000130] env[65758]: _type = "Task" [ 1285.000130] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.009775] env[65758]: DEBUG oslo_vmware.api [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661543, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.063569] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.063870] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.064042] env[65758]: INFO nova.compute.manager [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Shelving [ 1285.511256] env[65758]: DEBUG oslo_vmware.api [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661543, 'name': PowerOffVM_Task, 'duration_secs': 0.208937} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.511627] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1285.511695] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1285.511943] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9c7a902-d278-4eca-823d-d93266351f7b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.582131] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1285.582354] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1285.582520] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleting the datastore file [datastore2] 8b2cfaf8-dd34-4262-a867-613502a964ca {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1285.583091] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-733fe59f-cede-4f0e-b33f-94fe499c362b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.591168] env[65758]: DEBUG oslo_vmware.api [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1285.591168] env[65758]: value = "task-4661545" [ 1285.591168] env[65758]: _type = "Task" [ 1285.591168] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.600744] env[65758]: DEBUG oslo_vmware.api [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661545, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.074873] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1286.075346] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-376e0170-9d91-4fab-91db-5ffe4cc74e82 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.085344] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1286.085344] env[65758]: value = "task-4661546" [ 1286.085344] env[65758]: _type = "Task" [ 1286.085344] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.100483] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661546, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.106462] env[65758]: DEBUG oslo_vmware.api [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134088} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.106855] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1286.107177] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1286.107464] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1286.107679] env[65758]: INFO nova.compute.manager [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1286.107963] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1286.108193] env[65758]: DEBUG nova.compute.manager [-] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1286.108305] env[65758]: DEBUG nova.network.neutron [-] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1286.108535] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1286.109103] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1286.109363] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1286.198570] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1286.456938] env[65758]: DEBUG nova.compute.manager [req-36111e71-ed47-4f88-afb4-542d3f16c7df req-89be1b1b-a1d3-49ce-8050-d61fccd6a48a service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Received event network-vif-deleted-3bf0a249-5403-4cc2-a15e-f66b0091f23a {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1286.457149] env[65758]: INFO nova.compute.manager [req-36111e71-ed47-4f88-afb4-542d3f16c7df req-89be1b1b-a1d3-49ce-8050-d61fccd6a48a service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Neutron deleted interface 3bf0a249-5403-4cc2-a15e-f66b0091f23a; detaching it from the instance and deleting it from the info cache [ 1286.457316] env[65758]: DEBUG nova.network.neutron [req-36111e71-ed47-4f88-afb4-542d3f16c7df req-89be1b1b-a1d3-49ce-8050-d61fccd6a48a service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1286.595182] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661546, 'name': PowerOffVM_Task, 'duration_secs': 0.22397} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.595613] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1286.596305] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b865e482-fc2b-450c-b8db-d88788f27500 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.616791] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684f34a5-e6b5-411a-8c77-5d06d870e5ac {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.937016] env[65758]: DEBUG nova.network.neutron [-] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1286.960433] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-edeaef04-58eb-4480-93b4-27222d143812 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.971480] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81a3320-ac3e-4898-b55f-d1167cfa0bcd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.003690] env[65758]: DEBUG nova.compute.manager [req-36111e71-ed47-4f88-afb4-542d3f16c7df req-89be1b1b-a1d3-49ce-8050-d61fccd6a48a service nova] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Detach interface failed, port_id=3bf0a249-5403-4cc2-a15e-f66b0091f23a, reason: Instance 8b2cfaf8-dd34-4262-a867-613502a964ca could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1287.128310] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Creating Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1287.128789] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6a652e86-8a0c-4729-9416-c1f9f85d1662 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.138105] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1287.138105] env[65758]: value = "task-4661547" [ 1287.138105] env[65758]: _type = "Task" [ 1287.138105] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.148847] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661547, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.439924] env[65758]: INFO nova.compute.manager [-] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Took 1.33 seconds to deallocate network for instance. [ 1287.648290] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661547, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.902587] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.902908] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.903174] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.903400] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.903622] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.905917] env[65758]: INFO nova.compute.manager [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 
tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Terminating instance [ 1287.948038] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.948338] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.948659] env[65758]: DEBUG nova.objects.instance [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lazy-loading 'resources' on Instance uuid 8b2cfaf8-dd34-4262-a867-613502a964ca {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.150430] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661547, 'name': CreateSnapshot_Task, 'duration_secs': 0.916502} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.150731] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Created Snapshot of the VM instance {{(pid=65758) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1288.151518] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8a7211-6c25-41f8-abce-47bfbfbd6fcc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.410623] env[65758]: DEBUG nova.compute.manager [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1288.410840] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1288.411748] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b4dd9f-7b7d-4269-aed1-a2724c9ede83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.420237] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1288.420507] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0088631f-ff78-479f-8e5e-a3303775d818 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.427388] env[65758]: DEBUG oslo_vmware.api [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1288.427388] env[65758]: value = "task-4661548" [ 1288.427388] env[65758]: _type = "Task" [ 1288.427388] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.436148] env[65758]: DEBUG oslo_vmware.api [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661548, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.554611] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9caea0-2b3e-45c4-8cc8-a509a1d8e69a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.562792] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b775c4d3-0fd4-4365-88c5-163a841f2eba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.596333] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3554e0d7-991b-4b75-ade3-ae2a3fae41e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.605804] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27402061-80d5-43a3-9a24-e5373225c696 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.621088] env[65758]: DEBUG nova.compute.provider_tree [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.671070] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Creating linked-clone VM from snapshot {{(pid=65758) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1288.671415] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f6ac4b63-4e7a-468a-ad43-ca3622f0f86d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.681080] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1288.681080] env[65758]: value = "task-4661549" [ 1288.681080] env[65758]: _type = "Task" [ 1288.681080] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.689733] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661549, 'name': CloneVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.941167] env[65758]: DEBUG oslo_vmware.api [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661548, 'name': PowerOffVM_Task, 'duration_secs': 0.19476} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.941477] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1288.941737] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1288.942078] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-321da302-44f7-4aef-a754-19f41ce2efb0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.014836] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1289.015075] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1289.015256] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleting the datastore file [datastore1] bc10286b-195f-48a2-b16c-f8f925ec7a2a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1289.015720] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75674ba6-3738-49c4-aca3-69c41b7ea428 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.024117] env[65758]: DEBUG oslo_vmware.api [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for the task: (returnval){ [ 1289.024117] env[65758]: value = "task-4661551" [ 1289.024117] env[65758]: _type = "Task" [ 1289.024117] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.034167] env[65758]: DEBUG oslo_vmware.api [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661551, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.124487] env[65758]: DEBUG nova.scheduler.client.report [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1289.190867] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661549, 'name': CloneVM_Task} progress is 94%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.534297] env[65758]: DEBUG oslo_vmware.api [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Task: {'id': task-4661551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145214} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.534549] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.534738] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.534904] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.535085] env[65758]: INFO nova.compute.manager [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1289.535337] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1289.535569] env[65758]: DEBUG nova.compute.manager [-] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1289.535664] env[65758]: DEBUG nova.network.neutron [-] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1289.535908] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1289.536453] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1289.536727] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1289.573879] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1289.629754] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1289.648520] env[65758]: INFO nova.scheduler.client.report [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted allocations for instance 8b2cfaf8-dd34-4262-a867-613502a964ca [ 1289.691873] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661549, 'name': CloneVM_Task} progress is 100%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.031875] env[65758]: DEBUG nova.compute.manager [req-9e89681f-4c97-4593-a7bf-228e4919160b req-5ea313dd-19f5-45b2-8f0a-9559d1b88d24 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Received event network-vif-deleted-f3dcfa87-c097-4b94-bab6-e9fd7455605b {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1290.032102] env[65758]: INFO nova.compute.manager [req-9e89681f-4c97-4593-a7bf-228e4919160b req-5ea313dd-19f5-45b2-8f0a-9559d1b88d24 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Neutron deleted interface f3dcfa87-c097-4b94-bab6-e9fd7455605b; detaching it from the instance and deleting it from the info cache [ 1290.032255] env[65758]: DEBUG nova.network.neutron [req-9e89681f-4c97-4593-a7bf-228e4919160b req-5ea313dd-19f5-45b2-8f0a-9559d1b88d24 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1290.159383] env[65758]: DEBUG oslo_concurrency.lockutils [None req-ca93ab32-fd74-4290-8b25-0dd5e1924fd7 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "8b2cfaf8-dd34-4262-a867-613502a964ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.686s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.193763] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661549, 'name': CloneVM_Task, 'duration_secs': 1.025634} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.193941] env[65758]: INFO nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Created linked-clone VM from snapshot [ 1290.194907] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4200b8f-1efb-4dd6-8aac-e5f06b4643f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.203386] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Uploading image c785acf9-ab1e-448c-a793-d1eed56d0b17 {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1290.232394] env[65758]: DEBUG oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1290.232394] env[65758]: value = "vm-910099" [ 1290.232394] env[65758]: _type = "VirtualMachine" [ 1290.232394] env[65758]: }. 
{{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1290.232394] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-679c5f2c-3c0d-4b95-8f18-be5e5991b749 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.241045] env[65758]: DEBUG oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease: (returnval){ [ 1290.241045] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e6aad5-2570-097d-ff9c-98dceb7328b0" [ 1290.241045] env[65758]: _type = "HttpNfcLease" [ 1290.241045] env[65758]: } obtained for exporting VM: (result){ [ 1290.241045] env[65758]: value = "vm-910099" [ 1290.241045] env[65758]: _type = "VirtualMachine" [ 1290.241045] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1290.241469] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the lease: (returnval){ [ 1290.241469] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e6aad5-2570-097d-ff9c-98dceb7328b0" [ 1290.241469] env[65758]: _type = "HttpNfcLease" [ 1290.241469] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1290.249513] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1290.249513] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e6aad5-2570-097d-ff9c-98dceb7328b0" [ 1290.249513] env[65758]: _type = "HttpNfcLease" [ 1290.249513] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1290.509435] env[65758]: DEBUG nova.network.neutron [-] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1290.535886] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2414e5d4-f040-48de-a6cf-23d26ca93ea2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.550294] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d1e23a-3459-4566-a5a2-0c71fd6af0bd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.585149] env[65758]: DEBUG nova.compute.manager [req-9e89681f-4c97-4593-a7bf-228e4919160b req-5ea313dd-19f5-45b2-8f0a-9559d1b88d24 service nova] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Detach interface failed, port_id=f3dcfa87-c097-4b94-bab6-e9fd7455605b, reason: Instance bc10286b-195f-48a2-b16c-f8f925ec7a2a could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1290.750479] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1290.750479] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e6aad5-2570-097d-ff9c-98dceb7328b0" [ 1290.750479] env[65758]: _type = "HttpNfcLease" [ 1290.750479] env[65758]: } is ready. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1290.750969] env[65758]: DEBUG oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1290.750969] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e6aad5-2570-097d-ff9c-98dceb7328b0" [ 1290.750969] env[65758]: _type = "HttpNfcLease" [ 1290.750969] env[65758]: }. {{(pid=65758) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1290.751557] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9767ceac-2420-4cc1-8a68-e454a7235474 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.760541] env[65758]: DEBUG oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52671362-ed92-55be-ec40-b2ab4fab13ed/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1290.760756] env[65758]: DEBUG oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52671362-ed92-55be-ec40-b2ab4fab13ed/disk-0.vmdk for reading. {{(pid=65758) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1290.848607] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-81793b0d-c005-45c0-bcb0-6d65670cfac9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.012962] env[65758]: INFO nova.compute.manager [-] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Took 1.48 seconds to deallocate network for instance. 
[ 1291.519664] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.520146] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.520376] env[65758]: DEBUG nova.objects.instance [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lazy-loading 'resources' on Instance uuid bc10286b-195f-48a2-b16c-f8f925ec7a2a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1291.543524] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "a125c33a-347c-4522-ac8e-e171fe92757a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.543857] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "a125c33a-347c-4522-ac8e-e171fe92757a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.045823] env[65758]: DEBUG nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1292.122211] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d78cb4-8ce5-47f6-90c5-086f2f96635e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.131169] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abb7771-4b6b-408b-8bed-b8524b451bca {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.165571] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c089f022-8463-4ae4-aa2e-7b14b0a3c86f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.174307] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab08b6a-d894-41ff-9caa-b4f8f1baeb6e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.189308] env[65758]: DEBUG nova.compute.provider_tree [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.570830] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.692990] env[65758]: DEBUG nova.scheduler.client.report [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1293.198417] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.201033] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.630s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.203176] env[65758]: INFO nova.compute.claims [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1293.221261] env[65758]: INFO nova.scheduler.client.report [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Deleted allocations for instance bc10286b-195f-48a2-b16c-f8f925ec7a2a [ 1293.729790] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fb6cea01-b8f1-429e-8082-916c18f41a5a tempest-AttachVolumeShelveTestJSON-1711437305 tempest-AttachVolumeShelveTestJSON-1711437305-project-member] Lock "bc10286b-195f-48a2-b16c-f8f925ec7a2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.827s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.293866] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534a7265-e7e8-4fd4-8c4d-930fb84cba80 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.302500] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4db9bc1-e0a0-4a99-bd3b-334d8499a862 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.334989] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b77fc9a-c07f-48c5-b61a-91638ff579a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.344137] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da25f6d9-ea51-4efa-a632-55579548bda6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.359482] env[65758]: DEBUG nova.compute.provider_tree [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.862876] env[65758]: DEBUG nova.scheduler.client.report [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1295.370072] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.370648] env[65758]: DEBUG nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1295.877538] env[65758]: DEBUG nova.compute.utils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1295.879724] env[65758]: DEBUG nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1295.880060] env[65758]: DEBUG nova.network.neutron [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1295.880543] env[65758]: WARNING neutronclient.v2_0.client [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1295.880939] env[65758]: WARNING neutronclient.v2_0.client [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1295.882029] env[65758]: WARNING openstack [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1295.882125] env[65758]: WARNING openstack [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1295.930435] env[65758]: DEBUG nova.policy [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc85d2d1d84f4df0b4de5e6388bb9398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82bfbb5ee6714c9aa5119cb714d28ce2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1296.229641] env[65758]: DEBUG nova.network.neutron [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Successfully created port: 423be816-871a-4c33-87b9-aa8d4eb66911 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1296.392258] env[65758]: DEBUG nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1297.406200] env[65758]: DEBUG nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1297.438408] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=<?>,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-21T13:11:36Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1297.438670] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1297.438822] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1297.439012] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1297.439159] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1297.439299] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1297.439501] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1297.439660] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1297.439820] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 
tempest-ServersTestJSON-887760377-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1297.439980] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1297.440176] env[65758]: DEBUG nova.virt.hardware [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1297.441080] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76261dc2-347e-4ee6-9d2c-256371c39ef1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.449966] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81db2287-5c3d-4278-9db2-7fce818cb6f2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.644799] env[65758]: DEBUG oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52671362-ed92-55be-ec40-b2ab4fab13ed/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1297.645772] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04128d5b-025d-47fe-8d2a-7ae2cfbc8161 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.652918] env[65758]: DEBUG oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52671362-ed92-55be-ec40-b2ab4fab13ed/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1297.653104] env[65758]: ERROR oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52671362-ed92-55be-ec40-b2ab4fab13ed/disk-0.vmdk due to incomplete transfer. 
[ 1297.653358] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d869274d-1857-4bbb-b1a9-8e084a2d61f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.662174] env[65758]: DEBUG nova.compute.manager [req-278527e6-be0a-4a06-a0a7-1fe70e435f3c req-b4909df9-8ac3-4c17-96d9-886ea902b9b6 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Received event network-vif-plugged-423be816-871a-4c33-87b9-aa8d4eb66911 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1297.662174] env[65758]: DEBUG oslo_concurrency.lockutils [req-278527e6-be0a-4a06-a0a7-1fe70e435f3c req-b4909df9-8ac3-4c17-96d9-886ea902b9b6 service nova] Acquiring lock "a125c33a-347c-4522-ac8e-e171fe92757a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.662174] env[65758]: DEBUG oslo_concurrency.lockutils [req-278527e6-be0a-4a06-a0a7-1fe70e435f3c req-b4909df9-8ac3-4c17-96d9-886ea902b9b6 service nova] Lock "a125c33a-347c-4522-ac8e-e171fe92757a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.662174] env[65758]: DEBUG oslo_concurrency.lockutils [req-278527e6-be0a-4a06-a0a7-1fe70e435f3c req-b4909df9-8ac3-4c17-96d9-886ea902b9b6 service nova] Lock "a125c33a-347c-4522-ac8e-e171fe92757a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1297.662174] env[65758]: DEBUG nova.compute.manager [req-278527e6-be0a-4a06-a0a7-1fe70e435f3c req-b4909df9-8ac3-4c17-96d9-886ea902b9b6 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] No waiting events found dispatching network-vif-plugged-423be816-871a-4c33-87b9-aa8d4eb66911 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1297.662553] env[65758]: WARNING nova.compute.manager [req-278527e6-be0a-4a06-a0a7-1fe70e435f3c req-b4909df9-8ac3-4c17-96d9-886ea902b9b6 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Received unexpected event network-vif-plugged-423be816-871a-4c33-87b9-aa8d4eb66911 for instance with vm_state building and task_state spawning. [ 1297.664342] env[65758]: DEBUG oslo_vmware.rw_handles [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52671362-ed92-55be-ec40-b2ab4fab13ed/disk-0.vmdk. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1297.664784] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Uploaded image c785acf9-ab1e-448c-a793-d1eed56d0b17 to the Glance image server {{(pid=65758) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1297.667565] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Destroying the VM {{(pid=65758) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1297.668222] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8a542204-fbb2-4711-8d91-faba6304bd43 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.677342] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1297.677342] env[65758]: value = "task-4661554" [ 1297.677342] env[65758]: _type = "Task" [ 1297.677342] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.687101] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661554, 'name': Destroy_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.747102] env[65758]: DEBUG nova.network.neutron [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Successfully updated port: 423be816-871a-4c33-87b9-aa8d4eb66911 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1298.187683] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661554, 'name': Destroy_Task, 'duration_secs': 0.343261} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.187952] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Destroyed the VM [ 1298.188219] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Deleting Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1298.188495] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e614ab2a-4950-4430-97eb-7a84c647f4a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.195865] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1298.195865] env[65758]: value = "task-4661555" [ 1298.195865] env[65758]: _type = "Task" [ 1298.195865] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.204592] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661555, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.250516] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "refresh_cache-a125c33a-347c-4522-ac8e-e171fe92757a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.250732] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "refresh_cache-a125c33a-347c-4522-ac8e-e171fe92757a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.250957] env[65758]: DEBUG nova.network.neutron [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1298.706333] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661555, 'name': RemoveSnapshot_Task, 'duration_secs': 0.412337} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.706731] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Deleted Snapshot of the VM instance {{(pid=65758) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1298.706862] env[65758]: DEBUG nova.compute.manager [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1298.707676] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f0a4b0-6725-42d8-92ca-9877aeeb0b60 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.753545] env[65758]: WARNING openstack [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1298.753984] env[65758]: WARNING openstack [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1298.791124] env[65758]: DEBUG nova.network.neutron [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1298.882821] env[65758]: WARNING neutronclient.v2_0.client [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1298.883503] env[65758]: WARNING openstack [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1298.883870] env[65758]: WARNING openstack [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1298.966819] env[65758]: DEBUG nova.network.neutron [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Updating instance_info_cache with network_info: [{"id": "423be816-871a-4c33-87b9-aa8d4eb66911", "address": "fa:16:3e:e9:f9:e2", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap423be816-87", "ovs_interfaceid": "423be816-871a-4c33-87b9-aa8d4eb66911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1299.220195] env[65758]: INFO nova.compute.manager [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Shelve offloading [ 1299.469702] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "refresh_cache-a125c33a-347c-4522-ac8e-e171fe92757a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.470196] env[65758]: DEBUG nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Instance network_info: |[{"id": "423be816-871a-4c33-87b9-aa8d4eb66911", "address": "fa:16:3e:e9:f9:e2", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap423be816-87", "ovs_interfaceid": "423be816-871a-4c33-87b9-aa8d4eb66911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1299.470732] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:f9:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '423be816-871a-4c33-87b9-aa8d4eb66911', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1299.478608] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1299.478871] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1299.479125] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-433e5273-1145-4de8-ab1f-4f4bcfb1549c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.501659] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1299.501659] env[65758]: value = "task-4661556" [ 1299.501659] env[65758]: _type = "Task" [ 1299.501659] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.512423] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661556, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.690975] env[65758]: DEBUG nova.compute.manager [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Received event network-changed-423be816-871a-4c33-87b9-aa8d4eb66911 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1299.690975] env[65758]: DEBUG nova.compute.manager [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Refreshing instance network info cache due to event network-changed-423be816-871a-4c33-87b9-aa8d4eb66911. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1299.690975] env[65758]: DEBUG oslo_concurrency.lockutils [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] Acquiring lock "refresh_cache-a125c33a-347c-4522-ac8e-e171fe92757a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.690975] env[65758]: DEBUG oslo_concurrency.lockutils [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] Acquired lock "refresh_cache-a125c33a-347c-4522-ac8e-e171fe92757a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.691362] env[65758]: DEBUG nova.network.neutron [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Refreshing network info cache for port 423be816-871a-4c33-87b9-aa8d4eb66911 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1299.724558] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.726303] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8cc6a35-b18b-4ccc-b9be-1ef85cbe8af6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.734065] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1299.734065] env[65758]: value = "task-4661557" [ 1299.734065] env[65758]: _type = "Task" [ 1299.734065] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.743200] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661557, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.011940] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661556, 'name': CreateVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.195810] env[65758]: WARNING neutronclient.v2_0.client [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1300.196597] env[65758]: WARNING openstack [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1300.196967] env[65758]: WARNING openstack [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1300.249271] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] VM already powered off {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1300.249489] env[65758]: DEBUG nova.compute.manager [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1300.251840] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ec249f-6294-4045-8f89-c25d2f242ebd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.259778] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.259778] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.259900] env[65758]: DEBUG nova.network.neutron [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1300.365725] env[65758]: WARNING neutronclient.v2_0.client [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] The python binding code in 
neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1300.366461] env[65758]: WARNING openstack [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1300.366857] env[65758]: WARNING openstack [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1300.471364] env[65758]: DEBUG nova.network.neutron [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Updated VIF entry in instance network info cache for port 423be816-871a-4c33-87b9-aa8d4eb66911. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1300.471777] env[65758]: DEBUG nova.network.neutron [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Updating instance_info_cache with network_info: [{"id": "423be816-871a-4c33-87b9-aa8d4eb66911", "address": "fa:16:3e:e9:f9:e2", "network": {"id": "51a4730c-620c-41a3-9a17-2643e980fe3e", "bridge": "br-int", "label": "tempest-ServersTestJSON-268053542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82bfbb5ee6714c9aa5119cb714d28ce2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap423be816-87", "ovs_interfaceid": "423be816-871a-4c33-87b9-aa8d4eb66911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1300.512724] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661556, 'name': CreateVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.762848] env[65758]: WARNING neutronclient.v2_0.client [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1300.763687] env[65758]: WARNING openstack [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1300.764137] env[65758]: WARNING openstack [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1300.946431] env[65758]: WARNING neutronclient.v2_0.client [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1300.947054] env[65758]: WARNING openstack [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1300.947505] env[65758]: WARNING openstack [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1300.974950] env[65758]: DEBUG oslo_concurrency.lockutils [req-6ad6248a-e2d4-4e45-9902-998b90df1fba req-2facc7b5-7406-45ae-bf63-40e3370e5131 service nova] Releasing lock "refresh_cache-a125c33a-347c-4522-ac8e-e171fe92757a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.018453] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661556, 'name': CreateVM_Task, 'duration_secs': 1.343409} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.018645] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1301.019344] env[65758]: WARNING neutronclient.v2_0.client [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1301.019637] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.019741] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1301.020099] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1301.020503] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51e4d437-1e19-431c-99cf-a087490f68e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.025773] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1301.025773] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52ed2892-c408-f933-2b36-46bc22e25d68" [ 1301.025773] env[65758]: _type = "Task" [ 1301.025773] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.035104] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ed2892-c408-f933-2b36-46bc22e25d68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.054275] env[65758]: DEBUG nova.network.neutron [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [{"id": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "address": "fa:16:3e:45:e5:9e", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61227a3e-82", "ovs_interfaceid": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1301.537559] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52ed2892-c408-f933-2b36-46bc22e25d68, 'name': SearchDatastore_Task, 'duration_secs': 0.01338} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.538028] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.538278] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1301.538516] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.538662] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1301.538837] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1301.539133] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f57f07cd-aa0c-4361-9b53-e109b4b86eb8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.549087] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1301.549288] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1301.550056] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dc05439-32e5-499b-b296-31f005fe88e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.556087] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1301.556087] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5281487a-28a8-06db-ad37-02a853be6585" [ 1301.556087] env[65758]: _type = "Task" [ 1301.556087] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.556591] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.556999] env[65758]: WARNING neutronclient.v2_0.client [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1301.557642] env[65758]: WARNING openstack [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1301.558017] env[65758]: WARNING openstack [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1301.563191] env[65758]: WARNING neutronclient.v2_0.client [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1301.574791] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5281487a-28a8-06db-ad37-02a853be6585, 'name': SearchDatastore_Task, 'duration_secs': 0.009674} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.575662] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e31cb857-11ad-475c-9372-bbbcaad3c92b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.582449] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1301.582449] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527b1ced-ab72-2e14-5edf-e3aba72ba0d2" [ 1301.582449] env[65758]: _type = "Task" [ 1301.582449] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.591319] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527b1ced-ab72-2e14-5edf-e3aba72ba0d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.790790] env[65758]: DEBUG nova.compute.manager [req-c5128fd8-6fe8-43a8-9555-2aa772755b3f req-fdb9dbf8-cc73-41d6-a936-a5b093393e35 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received event network-vif-unplugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1301.791041] env[65758]: DEBUG oslo_concurrency.lockutils [req-c5128fd8-6fe8-43a8-9555-2aa772755b3f req-fdb9dbf8-cc73-41d6-a936-a5b093393e35 service nova] Acquiring lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.791147] env[65758]: DEBUG oslo_concurrency.lockutils [req-c5128fd8-6fe8-43a8-9555-2aa772755b3f req-fdb9dbf8-cc73-41d6-a936-a5b093393e35 service nova] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.791315] env[65758]: DEBUG oslo_concurrency.lockutils [req-c5128fd8-6fe8-43a8-9555-2aa772755b3f req-fdb9dbf8-cc73-41d6-a936-a5b093393e35 service nova] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.791488] env[65758]: DEBUG nova.compute.manager [req-c5128fd8-6fe8-43a8-9555-2aa772755b3f req-fdb9dbf8-cc73-41d6-a936-a5b093393e35 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] No waiting events found dispatching network-vif-unplugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1301.791697] env[65758]: WARNING nova.compute.manager [req-c5128fd8-6fe8-43a8-9555-2aa772755b3f req-fdb9dbf8-cc73-41d6-a936-a5b093393e35 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received unexpected event 
network-vif-unplugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 for instance with vm_state shelved and task_state shelving_offloading. [ 1301.888557] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1301.889512] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042d0250-9189-4bde-ba9d-3f4364509b3b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.898373] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1301.898650] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74bb29ef-d470-4095-ba31-7ca7a3b3c3fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.980759] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1301.981026] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1301.981188] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleting the datastore file [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1301.981482] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72f2f76a-7acc-4fc1-aeef-075d33b4a93e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.990024] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1301.990024] env[65758]: value = "task-4661559" [ 1301.990024] env[65758]: _type = "Task" [ 1301.990024] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.999444] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661559, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.096035] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527b1ced-ab72-2e14-5edf-e3aba72ba0d2, 'name': SearchDatastore_Task, 'duration_secs': 0.00982} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.096283] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1302.096577] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a125c33a-347c-4522-ac8e-e171fe92757a/a125c33a-347c-4522-ac8e-e171fe92757a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1302.096849] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-927f154f-a7ef-497f-bb83-8b3896b5ee92 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.104171] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1302.104171] env[65758]: value = "task-4661560" [ 1302.104171] env[65758]: _type = "Task" [ 1302.104171] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.113076] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661560, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.502849] env[65758]: DEBUG oslo_vmware.api [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136291} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.503205] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1302.503401] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1302.503789] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1302.535622] env[65758]: INFO nova.scheduler.client.report [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted allocations for instance e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 [ 1302.618865] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661560, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.041412] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.041777] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.041909] env[65758]: DEBUG nova.objects.instance [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'resources' on Instance uuid e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.114963] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661560, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532772} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.115727] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] a125c33a-347c-4522-ac8e-e171fe92757a/a125c33a-347c-4522-ac8e-e171fe92757a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1303.115995] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1303.116295] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af87d90c-8c4c-4a7d-93b9-11726be2b39c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.124289] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1303.124289] env[65758]: value = "task-4661561" [ 1303.124289] env[65758]: _type = "Task" [ 1303.124289] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.134221] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661561, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.544451] env[65758]: DEBUG nova.objects.instance [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'numa_topology' on Instance uuid e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.633955] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661561, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063768} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.634281] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1303.635113] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e046209e-18b5-4b4a-a007-38b38bd910c4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.657857] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] a125c33a-347c-4522-ac8e-e171fe92757a/a125c33a-347c-4522-ac8e-e171fe92757a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1303.658281] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b23a5bd1-e432-4a5a-a69d-4f53996f46d5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.678586] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1303.678586] env[65758]: value = "task-4661562" [ 1303.678586] env[65758]: _type = "Task" [ 1303.678586] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.817273] env[65758]: DEBUG nova.compute.manager [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received event network-changed-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1303.817468] env[65758]: DEBUG nova.compute.manager [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Refreshing instance network info cache due to event network-changed-61227a3e-82c2-4ebf-b71b-b953b5667f90. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1303.817678] env[65758]: DEBUG oslo_concurrency.lockutils [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] Acquiring lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.817819] env[65758]: DEBUG oslo_concurrency.lockutils [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] Acquired lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.817975] env[65758]: DEBUG nova.network.neutron [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Refreshing network info cache for port 61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1304.047415] env[65758]: DEBUG nova.objects.base [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=65758) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1304.126231] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1f0ce0-21df-41fd-93fd-83939319cf2a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.134752] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c84a28e-15de-4547-ade0-0c7615393db9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.166535] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c54b1f-4001-4f11-85f4-86ff2de40740 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.174791] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af357100-fa81-4a8f-9db8-947dbfaa2518 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.189733] env[65758]: DEBUG nova.compute.provider_tree [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1304.196842] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661562, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.300161] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.320919] env[65758]: WARNING neutronclient.v2_0.client [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1304.321636] env[65758]: WARNING openstack [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1304.321979] env[65758]: WARNING openstack [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1304.500716] env[65758]: WARNING neutronclient.v2_0.client [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1304.501492] env[65758]: WARNING openstack [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1304.501853] env[65758]: WARNING openstack [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1304.588298] env[65758]: DEBUG nova.network.neutron [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updated VIF entry in instance network info cache for port 61227a3e-82c2-4ebf-b71b-b953b5667f90. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1304.588746] env[65758]: DEBUG nova.network.neutron [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [{"id": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "address": "fa:16:3e:45:e5:9e", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap61227a3e-82", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1304.689217] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661562, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.693210] env[65758]: DEBUG nova.scheduler.client.report [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1305.093029] env[65758]: DEBUG oslo_concurrency.lockutils [req-4e042422-549c-4e31-87be-14e884ae8b71 req-8ba8fcbc-96a7-4e37-b212-e87a2549996d service nova] Releasing lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.190923] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661562, 'name': ReconfigVM_Task, 'duration_secs': 1.344679} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.191415] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Reconfigured VM instance instance-0000007b to attach disk [datastore2] a125c33a-347c-4522-ac8e-e171fe92757a/a125c33a-347c-4522-ac8e-e171fe92757a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1305.192358] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ce6faf9-a922-4464-9c2c-0ed5c46364db {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.198042] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.203474] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1305.203474] env[65758]: value = "task-4661563" [ 1305.203474] env[65758]: _type = "Task" [ 1305.203474] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.216962] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661563, 'name': Rename_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.712636] env[65758]: DEBUG oslo_concurrency.lockutils [None req-de637d00-8e51-45e5-8251-28f9b9a416f1 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.649s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.713974] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.414s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.714277] env[65758]: INFO nova.compute.manager [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Unshelving [ 1305.722893] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661563, 'name': Rename_Task, 'duration_secs': 0.163367} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.723964] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1305.724270] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e05d130-f075-48d7-b761-834490e30477 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.732635] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1305.732635] env[65758]: value = "task-4661564" [ 1305.732635] env[65758]: _type = "Task" [ 1305.732635] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.741581] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.243549] env[65758]: DEBUG oslo_vmware.api [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661564, 'name': PowerOnVM_Task, 'duration_secs': 0.443516} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.243906] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1306.243969] env[65758]: INFO nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Took 8.84 seconds to spawn the instance on the hypervisor. [ 1306.244164] env[65758]: DEBUG nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1306.244934] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322fdb12-1006-45ec-8e26-33446c5cfae9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.739371] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.739715] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.739933] env[65758]: DEBUG nova.objects.instance [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'pci_requests' on Instance uuid e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1306.762578] env[65758]: INFO nova.compute.manager [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Took 14.21 seconds to build instance. 
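[editor's note] The Rename_Task and PowerOnVM_Task records above follow the oslo.vmware wait_for_task pattern visible throughout this log: a vSphere task is submitted, its progress is polled, and the record with duration_secs is emitted once it reaches a terminal state. The following is a minimal self-contained sketch of that polling loop only; submit_task and get_task_info are hypothetical stand-ins, not oslo_vmware APIs, and the timings are invented.

```python
import time

# Hypothetical stand-ins for the vSphere task calls oslo.vmware wraps;
# they only exist to make the polling loop runnable on its own.
def submit_task(name):
    return {"id": "task-0000001", "name": name, "progress": 0, "state": "running"}

def get_task_info(task):
    # Pretend the task advances 25% per poll and then succeeds.
    task["progress"] = min(task["progress"] + 25, 100)
    if task["progress"] == 100:
        task["state"] = "success"
    return task

def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it finishes, mirroring the _poll_task log records."""
    start = time.monotonic()
    while True:
        info = get_task_info(task)
        print(f"Task: {{'id': {info['id']!r}, 'name': {info['name']!r}}} "
              f"progress is {info['progress']}%.")
        if info["state"] == "success":
            duration = time.monotonic() - start
            print(f"Task {info['id']} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return info
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task(submit_task("PowerOnVM_Task"))
```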
[ 1307.191348] env[65758]: DEBUG oslo_concurrency.lockutils [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "a125c33a-347c-4522-ac8e-e171fe92757a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.244020] env[65758]: DEBUG nova.objects.instance [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'numa_topology' on Instance uuid e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1307.265179] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4746ced3-737a-4d1a-ab9e-a55013f29d88 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "a125c33a-347c-4522-ac8e-e171fe92757a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.721s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.265518] env[65758]: DEBUG oslo_concurrency.lockutils [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "a125c33a-347c-4522-ac8e-e171fe92757a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.074s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.265742] env[65758]: DEBUG nova.compute.manager [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1307.266724] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420388b1-8b3c-4b18-b3d0-a0e3142e2181 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.274718] env[65758]: DEBUG nova.compute.manager [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=65758) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3442}} [ 1307.275298] env[65758]: DEBUG nova.objects.instance [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lazy-loading 'flavor' on Instance uuid a125c33a-347c-4522-ac8e-e171fe92757a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1307.747057] env[65758]: INFO nova.compute.claims [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1308.282699] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 
tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1308.283111] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3355760b-46c4-47e0-a686-48f9ea49377e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.291552] env[65758]: DEBUG oslo_vmware.api [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1308.291552] env[65758]: value = "task-4661565" [ 1308.291552] env[65758]: _type = "Task" [ 1308.291552] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.303207] env[65758]: DEBUG oslo_vmware.api [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.802423] env[65758]: DEBUG oslo_vmware.api [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661565, 'name': PowerOffVM_Task, 'duration_secs': 0.190562} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.805254] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1308.805755] env[65758]: DEBUG nova.compute.manager [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1308.806587] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6563a550-0f21-4b77-987e-2979a50149fc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.842561] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da3f2e2-70d7-4c31-ae40-3b1de98fff68 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.852346] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc098ef-3398-445e-a878-a8658ed4eb67 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.884146] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2e88a0-ceb7-4d4e-8e1c-1fcfbf918091 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.893135] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5760da31-e2a1-48ba-8158-6e1e4420b243 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.909604] env[65758]: DEBUG nova.compute.provider_tree [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1309.130657] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.130915] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.131148] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.131326] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.131491] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.134267] env[65758]: INFO nova.compute.manager [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Terminating instance [ 1309.322050] env[65758]: DEBUG oslo_concurrency.lockutils [None req-665156a6-3491-4205-ad75-b295b0e0fec1 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "a125c33a-347c-4522-ac8e-e171fe92757a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.415195] env[65758]: DEBUG nova.scheduler.client.report [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1309.638863] env[65758]: DEBUG nova.compute.manager [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1309.639306] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1309.640304] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac83168-2675-4819-a8b8-bd387dee7280 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.649031] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1309.649355] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7577c77-d8ab-4aab-b159-93fec1b29444 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.658493] env[65758]: DEBUG oslo_vmware.api [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1309.658493] env[65758]: value = "task-4661566" [ 1309.658493] env[65758]: _type = "Task" [ 1309.658493] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.671288] env[65758]: DEBUG oslo_vmware.api [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661566, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.920321] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.180s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.920655] env[65758]: WARNING neutronclient.v2_0.client [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1309.923800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "a125c33a-347c-4522-ac8e-e171fe92757a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.923800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "a125c33a-347c-4522-ac8e-e171fe92757a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.923800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "a125c33a-347c-4522-ac8e-e171fe92757a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.924039] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "a125c33a-347c-4522-ac8e-e171fe92757a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.924039] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "a125c33a-347c-4522-ac8e-e171fe92757a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.925973] env[65758]: INFO nova.compute.manager [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Terminating instance [ 1309.952234] env[65758]: INFO nova.network.neutron [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 
tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating port 61227a3e-82c2-4ebf-b71b-b953b5667f90 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1310.169582] env[65758]: DEBUG oslo_vmware.api [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661566, 'name': PowerOffVM_Task, 'duration_secs': 0.197123} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.169833] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1310.169995] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1310.170368] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8831c47c-43fc-4660-9d63-6984174eb47e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.241232] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1310.241615] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Deleting contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1310.241782] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleting the datastore file [datastore1] e505f8e8-0612-4fe7-bcd2-73fdd39458fa {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1310.242153] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af8b500a-23d1-4b08-9315-21837a000b25 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.250500] env[65758]: DEBUG oslo_vmware.api [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1310.250500] env[65758]: value = "task-4661568" [ 1310.250500] env[65758]: _type = "Task" [ 1310.250500] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.260226] env[65758]: DEBUG oslo_vmware.api [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.429176] env[65758]: DEBUG nova.compute.manager [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1310.429649] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1310.430306] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f102da04-4989-44fb-b2e2-10db6045881d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.438345] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1310.438584] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8c3ccfe-1380-46e8-8ff8-e65c6efce981 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.478838] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.503404] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1310.503645] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1310.503812] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleting the datastore file [datastore2] a125c33a-347c-4522-ac8e-e171fe92757a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1310.504113] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-4e63ee5f-5e3f-477f-a44e-42662e747aee {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.512145] env[65758]: DEBUG oslo_vmware.api [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1310.512145] env[65758]: value = "task-4661570" [ 1310.512145] env[65758]: _type = "Task" [ 1310.512145] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.521164] env[65758]: DEBUG oslo_vmware.api [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.761899] env[65758]: DEBUG oslo_vmware.api [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14243} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.762113] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1310.762292] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Deleted contents of the VM from datastore datastore1 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1310.762456] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1310.762622] env[65758]: INFO nova.compute.manager [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1310.762864] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1310.763064] env[65758]: DEBUG nova.compute.manager [-] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1310.763161] env[65758]: DEBUG nova.network.neutron [-] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1310.763399] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1310.763960] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1310.764236] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1310.800027] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1310.982718] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.982997] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.983289] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.983482] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1310.984639] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd59bb5f-c5dd-44c7-8a44-f766a937b4c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.993793] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f610a4-d7d8-498c-b693-1ac5b5796b90 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.009449] 
env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba7d439-e00f-4e90-81ad-3ec9acfbb612 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.018495] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742895e1-bec9-4e97-b0b8-9dfc2749d12a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.024305] env[65758]: DEBUG oslo_vmware.api [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151772} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.024942] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1311.025236] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1311.025368] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1311.025660] env[65758]: INFO nova.compute.manager [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1311.025977] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1311.026230] env[65758]: DEBUG nova.compute.manager [-] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1311.026330] env[65758]: DEBUG nova.network.neutron [-] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1311.026574] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1311.027142] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1311.027445] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1311.057853] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179808MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1311.058101] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.058243] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.299997] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
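[editor's note] The repeated "Acquiring lock ... acquired ... waited ... released ... held" records come from oslo.concurrency's lock wrapper, which serializes resource-tracker updates per worker; that is why one request above waits ~0.5s on "compute_resources" while another holds it. A hedged sketch of that usage with lockutils.synchronized follows; the Tracker class and method bodies are illustrative only, not Nova's ResourceTracker, and it assumes oslo.concurrency is installed.

```python
from oslo_concurrency import lockutils

class Tracker:
    """Illustrative only: serializes updates the way the
    "compute_resources" lock records in the log suggest."""

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Only one claim/update runs at a time, so a concurrent caller
        # shows up in the log as "waited N.NNNs" before acquiring.
        print(f"claiming {vcpus} VCPU / {memory_mb} MB for {instance_uuid}")

    @lockutils.synchronized('compute_resources')
    def update_usage(self, instance_uuid):
        print(f"updating usage for {instance_uuid}")

if __name__ == "__main__":
    t = Tracker()
    t.instance_claim('e5b042e0-3dba-4bfe-9e4d-1d55bcb72742', 1, 192)
    t.update_usage('e505f8e8-0612-4fe7-bcd2-73fdd39458fa')
```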
[ 1311.310887] env[65758]: DEBUG nova.compute.manager [req-293632f9-64c6-4001-9eb1-baea2cb6c9c5 req-7f6dba2a-5e5d-44f5-87d2-683f3ea3f0b4 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Received event network-vif-deleted-72e1a94b-418a-4f03-a5c3-8876b1d7f3d3 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1311.311104] env[65758]: INFO nova.compute.manager [req-293632f9-64c6-4001-9eb1-baea2cb6c9c5 req-7f6dba2a-5e5d-44f5-87d2-683f3ea3f0b4 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Neutron deleted interface 72e1a94b-418a-4f03-a5c3-8876b1d7f3d3; detaching it from the instance and deleting it from the info cache [ 1311.311320] env[65758]: DEBUG nova.network.neutron [req-293632f9-64c6-4001-9eb1-baea2cb6c9c5 req-7f6dba2a-5e5d-44f5-87d2-683f3ea3f0b4 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1311.461857] env[65758]: DEBUG nova.compute.manager [req-e7716347-7251-4b8f-82cc-a06edd172352 req-5ca4c2db-62e9-4e54-823d-96e78143d43b service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received event network-vif-plugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1311.462337] env[65758]: DEBUG oslo_concurrency.lockutils [req-e7716347-7251-4b8f-82cc-a06edd172352 req-5ca4c2db-62e9-4e54-823d-96e78143d43b service nova] Acquiring lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.462447] env[65758]: DEBUG oslo_concurrency.lockutils [req-e7716347-7251-4b8f-82cc-a06edd172352 req-5ca4c2db-62e9-4e54-823d-96e78143d43b service nova] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.462615] env[65758]: DEBUG oslo_concurrency.lockutils [req-e7716347-7251-4b8f-82cc-a06edd172352 req-5ca4c2db-62e9-4e54-823d-96e78143d43b service nova] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.462773] env[65758]: DEBUG nova.compute.manager [req-e7716347-7251-4b8f-82cc-a06edd172352 req-5ca4c2db-62e9-4e54-823d-96e78143d43b service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] No waiting events found dispatching network-vif-plugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1311.462938] env[65758]: WARNING nova.compute.manager [req-e7716347-7251-4b8f-82cc-a06edd172352 req-5ca4c2db-62e9-4e54-823d-96e78143d43b service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received unexpected event network-vif-plugged-61227a3e-82c2-4ebf-b71b-b953b5667f90 for instance with vm_state shelved_offloaded and task_state spawning. 
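[editor's note] The "No waiting events found dispatching network-vif-plugged-..." warning above reflects the instance-event pattern: a waiter registers for a named per-instance event and the Neutron-triggered callback pops it; if nothing is registered, the event is logged as unexpected. The sketch below is a self-contained illustration of that pop-or-warn behavior, not Nova's InstanceEvents implementation.

```python
import threading

class InstanceEventRegistry:
    """Minimal register/pop registry for per-instance named events."""

    def __init__(self):
        self._events = {}   # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._events[(instance_uuid, event_name)] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"Received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
            return False
        ev.set()   # wake whoever is waiting on this event
        return True

if __name__ == "__main__":
    reg = InstanceEventRegistry()
    # No waiter was registered, mirroring the WARNING in the log.
    reg.pop("e5b042e0-3dba-4bfe-9e4d-1d55bcb72742",
            "network-vif-plugged-61227a3e-82c2-4ebf-b71b-b953b5667f90")
```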
[ 1311.552810] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.553354] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.553591] env[65758]: DEBUG nova.network.neutron [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1311.725178] env[65758]: DEBUG nova.network.neutron [-] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1311.814751] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c7d0cbb-3259-4b64-9be6-59a160634628 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.826167] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a011a778-9d89-49bc-b198-640716187530 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.854819] env[65758]: DEBUG nova.compute.manager [req-293632f9-64c6-4001-9eb1-baea2cb6c9c5 req-7f6dba2a-5e5d-44f5-87d2-683f3ea3f0b4 service nova] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Detach interface failed, port_id=72e1a94b-418a-4f03-a5c3-8876b1d7f3d3, reason: Instance e505f8e8-0612-4fe7-bcd2-73fdd39458fa could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1312.014251] env[65758]: DEBUG nova.network.neutron [-] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1312.056418] env[65758]: WARNING neutronclient.v2_0.client [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1312.057571] env[65758]: WARNING openstack [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1312.057571] env[65758]: WARNING openstack [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1312.088340] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance cdc1cfab-4f75-4caf-a4ee-8197af083353 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1312.088570] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a9550f72-009c-4143-afe2-887727e5c071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1312.088688] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e505f8e8-0612-4fe7-bcd2-73fdd39458fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1312.088808] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance a125c33a-347c-4522-ac8e-e171fe92757a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1312.088937] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1312.089147] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1312.089301] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '5', 'num_vm_active': '3', 'num_task_None': '2', 'num_os_type_None': '5', 'num_proj_82bfbb5ee6714c9aa5119cb714d28ce2': '2', 'io_workload': '0', 'num_proj_f32b2100e0824c56ab852e0d1bb37e87': '2', 'num_vm_shelved_offloaded': '1', 'num_task_spawning': '1', 'num_proj_4095654557a34bb0907071aedb3bb678': '1', 'num_task_deleting': '2', 'num_vm_stopped': '1'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1312.170872] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859f7cfc-a1c9-4933-80c8-3dd16638d669 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.179989] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bc4ea5-aa5a-4ba9-aab9-cf0c22afca32 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.215982] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb263054-50f5-43cf-931d-8ace5d4a3cf5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.224496] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b72b7cb-7d10-4476-9b4b-64bbd7f503ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.228794] env[65758]: INFO nova.compute.manager [-] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Took 1.47 seconds to deallocate network for instance. [ 1312.241816] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.279364] env[65758]: WARNING neutronclient.v2_0.client [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
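[editor's note] The final resource view above (used_ram=1472MB, used_disk=5GB, used_vcpus=5 across five instances) is straight arithmetic over the 192MB/1-vCPU/1GB flavor plus the 512MB memory reservation from the provider inventory, and the allocation ratios in that inventory set the schedulable capacity. A quick check of those numbers, with all values copied from the log records and assuming every instance uses that flavor:

```python
# Values taken from the log records above.
instances = 5
flavor = {"memory_mb": 192, "vcpus": 1, "root_gb": 1}   # m1.nano
reserved_mb = 512                                        # MEMORY_MB 'reserved'

# used_ram here appears to include the host reservation, which is how
# 1472MB lines up with five 192MB instances.
used_ram_mb = reserved_mb + instances * flavor["memory_mb"]
used_disk_gb = instances * flavor["root_gb"]
used_vcpus = instances * flavor["vcpus"]
print(used_ram_mb, used_disk_gb, used_vcpus)   # 1472 5 5, matching the log

# Allocation ratios from the provider inventory scale the schedulable total.
vcpu_capacity = 48 * 4.0                 # 192 schedulable VCPUs
mem_capacity_mb = (196590 - 512) * 1.0   # memory available to placement
print(vcpu_capacity, mem_capacity_mb)
```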
[ 1312.280144] env[65758]: WARNING openstack [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1312.280521] env[65758]: WARNING openstack [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1312.374368] env[65758]: DEBUG nova.network.neutron [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [{"id": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "address": "fa:16:3e:45:e5:9e", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61227a3e-82", "ovs_interfaceid": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1312.516761] env[65758]: INFO nova.compute.manager [-] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Took 1.49 seconds to deallocate network for instance. 
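[editor's note] The instance_info_cache update for e5b042e0-... carries the full network_info structure: a list of VIFs, each with its network, subnets, fixed IPs, and attached floating IPs. The sketch below walks a trimmed copy of that entry to pull out the addresses; the dict reproduces only the fields needed from the log record, with everything else omitted.

```python
# Trimmed copy of the network_info entry logged above (most fields omitted).
network_info = [{
    "id": "61227a3e-82c2-4ebf-b71b-b953b5667f90",
    "address": "fa:16:3e:45:e5:9e",
    "devname": "tap61227a3e-82",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.12",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.200", "type": "floating"}],
            }],
        }],
    },
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floats = [f["address"] for f in ip.get("floating_ips", [])]
            print(f"{vif['devname']} ({vif['address']}): "
                  f"fixed {ip['address']}, floating {floats}")
```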
[ 1312.745538] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.748586] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1312.877726] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.906487] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='5daf43c8146b1dc72f0465588a436fac',container_format='bare',created_at=2025-11-21T13:25:39Z,direct_url=,disk_format='vmdk',id=c785acf9-ab1e-448c-a793-d1eed56d0b17,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-854233248-shelved',owner='4095654557a34bb0907071aedb3bb678',properties=ImageMetaProps,protected=,size=31670784,status='active',tags=,updated_at=2025-11-21T13:25:52Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1312.906768] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1312.906921] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1312.907111] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1312.907252] env[65758]: DEBUG nova.virt.hardware [None 
req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1312.907391] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1312.907592] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1312.907772] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1312.907983] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1312.908194] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1312.908337] env[65758]: DEBUG nova.virt.hardware [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1312.909248] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9368298-730f-4889-81d7-a0cbb72731e2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.918317] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00177fe0-40d3-4282-bb4e-7d5904e62fc6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.932979] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:e5:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '438671d0-9468-4e44-84c1-4c0ebaa743e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61227a3e-82c2-4ebf-b71b-b953b5667f90', 'vif_model': 'vmxnet3'}] {{(pid=65758) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1312.940474] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1312.940747] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1312.940982] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07ace86c-89b5-4244-90f1-974b963b2e05 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.960670] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1312.960670] env[65758]: value = "task-4661571" [ 1312.960670] env[65758]: _type = "Task" [ 1312.960670] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.969667] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661571, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.024210] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.253050] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1313.253295] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.195s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.253750] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.508s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.254071] env[65758]: DEBUG nova.objects.instance [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'resources' on Instance uuid e505f8e8-0612-4fe7-bcd2-73fdd39458fa {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1313.338654] env[65758]: DEBUG nova.compute.manager [req-d4bf7f94-fc0e-4b1e-8774-51c760cd7a02 req-a3691f3d-0116-4187-9f83-e7b323001624 service nova] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] 
Received event network-vif-deleted-423be816-871a-4c33-87b9-aa8d4eb66911 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1313.472744] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661571, 'name': CreateVM_Task, 'duration_secs': 0.339627} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.473007] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1313.473544] env[65758]: WARNING neutronclient.v2_0.client [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1313.473940] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.474165] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.474531] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1313.474799] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe365ce2-569e-4016-b32a-2df4abcd7f29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.480471] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1313.480471] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52aba64b-7f14-8f1f-b9e2-2968e53fa873" [ 1313.480471] env[65758]: _type = "Task" [ 1313.480471] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.492188] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52aba64b-7f14-8f1f-b9e2-2968e53fa873, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.494591] env[65758]: DEBUG nova.compute.manager [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received event network-changed-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1313.494768] env[65758]: DEBUG nova.compute.manager [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Refreshing instance network info cache due to event network-changed-61227a3e-82c2-4ebf-b71b-b953b5667f90. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1313.495029] env[65758]: DEBUG oslo_concurrency.lockutils [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] Acquiring lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.495214] env[65758]: DEBUG oslo_concurrency.lockutils [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] Acquired lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.495394] env[65758]: DEBUG nova.network.neutron [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Refreshing network info cache for port 61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1313.836516] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc3e0ac-1864-46f3-b0ad-44cd5e5b332a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.844598] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670efbd6-381f-47dc-8a5f-1efe4f402d61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.874813] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403da6e9-254b-4acd-ac37-081d3631ba1b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.882979] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7b6c05-b3e1-4420-8656-51d2871c27f8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.896873] env[65758]: DEBUG nova.compute.provider_tree [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.991830] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] 
Releasing lock "[datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.992177] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Processing image c785acf9-ab1e-448c-a793-d1eed56d0b17 {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1313.992420] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17/c785acf9-ab1e-448c-a793-d1eed56d0b17.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.992565] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17/c785acf9-ab1e-448c-a793-d1eed56d0b17.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.992744] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1313.993035] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ace275b-a807-4aec-b508-4d71340d197b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.998026] env[65758]: WARNING neutronclient.v2_0.client [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1313.998670] env[65758]: WARNING openstack [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1313.999033] env[65758]: WARNING openstack [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1314.009762] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1314.009984] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1314.010757] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a1622e6-36a7-41a2-9ac3-642b3bb043aa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.017424] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1314.017424] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52781cd0-481a-5c6f-e853-b3961c40bfd0" [ 1314.017424] env[65758]: _type = "Task" [ 1314.017424] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.027393] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52781cd0-481a-5c6f-e853-b3961c40bfd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.185775] env[65758]: WARNING neutronclient.v2_0.client [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1314.186471] env[65758]: WARNING openstack [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1314.186835] env[65758]: WARNING openstack [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1314.266929] env[65758]: DEBUG nova.network.neutron [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updated VIF entry in instance network info cache for port 61227a3e-82c2-4ebf-b71b-b953b5667f90. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1314.267311] env[65758]: DEBUG nova.network.neutron [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [{"id": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "address": "fa:16:3e:45:e5:9e", "network": {"id": "5f03c6fd-f429-4c64-8d2b-6cf1fb4890b9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1937229271-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4095654557a34bb0907071aedb3bb678", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61227a3e-82", "ovs_interfaceid": "61227a3e-82c2-4ebf-b71b-b953b5667f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1314.400412] env[65758]: DEBUG nova.scheduler.client.report [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1314.528238] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Preparing fetch location {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1314.528442] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Fetch image to [datastore2] OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae/OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae.vmdk {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1314.528601] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Downloading stream optimized image c785acf9-ab1e-448c-a793-d1eed56d0b17 to [datastore2] OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae/OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae.vmdk on the data store datastore2 as vApp {{(pid=65758) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1314.528762] env[65758]: DEBUG nova.virt.vmwareapi.images [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Downloading image file data c785acf9-ab1e-448c-a793-d1eed56d0b17 to the ESX as VM named 'OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae' {{(pid=65758) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1314.607412] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1314.607412] env[65758]: value = "resgroup-9" [ 1314.607412] env[65758]: _type = "ResourcePool" [ 1314.607412] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1314.607695] env[65758]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a62c6615-1e59-446b-8ed1-bd4af8adcb18 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.629744] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease: (returnval){ [ 1314.629744] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52537066-0a9e-dde7-5d1d-7e22fd6fb1cd" [ 1314.629744] env[65758]: _type = "HttpNfcLease" [ 1314.629744] env[65758]: } obtained for vApp import into resource pool (val){ [ 1314.629744] env[65758]: value = "resgroup-9" [ 1314.629744] env[65758]: _type = "ResourcePool" [ 1314.629744] env[65758]: }. 
{{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1314.630313] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the lease: (returnval){ [ 1314.630313] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52537066-0a9e-dde7-5d1d-7e22fd6fb1cd" [ 1314.630313] env[65758]: _type = "HttpNfcLease" [ 1314.630313] env[65758]: } to be ready. {{(pid=65758) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1314.637172] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1314.637172] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52537066-0a9e-dde7-5d1d-7e22fd6fb1cd" [ 1314.637172] env[65758]: _type = "HttpNfcLease" [ 1314.637172] env[65758]: } is initializing. {{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1314.770384] env[65758]: DEBUG oslo_concurrency.lockutils [req-7cad7b1c-b72d-4546-80e5-d081af5d7af7 req-18910dad-4b15-4dab-905e-4b37d3ab7b7d service nova] Releasing lock "refresh_cache-e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.906060] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.908747] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.885s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.909178] env[65758]: DEBUG nova.objects.instance [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lazy-loading 'resources' on Instance uuid a125c33a-347c-4522-ac8e-e171fe92757a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1314.927108] env[65758]: INFO nova.scheduler.client.report [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleted allocations for instance e505f8e8-0612-4fe7-bcd2-73fdd39458fa [ 1315.139437] env[65758]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1315.139437] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52537066-0a9e-dde7-5d1d-7e22fd6fb1cd" [ 1315.139437] env[65758]: _type = "HttpNfcLease" [ 1315.139437] env[65758]: } is ready. 
{{(pid=65758) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1315.139819] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1315.139819] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52537066-0a9e-dde7-5d1d-7e22fd6fb1cd" [ 1315.139819] env[65758]: _type = "HttpNfcLease" [ 1315.139819] env[65758]: }. {{(pid=65758) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1315.140685] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171e58b3-8af6-4ac8-a257-9a0d33191c0d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.149368] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e28709-4103-40a8-d42c-040a3fe1d64e/disk-0.vmdk from lease info. {{(pid=65758) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1315.149603] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating HTTP connection to write to file with size = 31670784 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e28709-4103-40a8-d42c-040a3fe1d64e/disk-0.vmdk. 
{{(pid=65758) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1315.213538] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-139cb71a-b289-4912-ae75-7f5b3a861791 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.255308] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.255565] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.440034] env[65758]: DEBUG oslo_concurrency.lockutils [None req-6ec158a8-d72d-4a1a-a01a-c78f73f6d8af tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "e505f8e8-0612-4fe7-bcd2-73fdd39458fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.309s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.473140] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.478085] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.496836] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696539fe-c564-49e4-a2d0-cfe77c594bcb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.508516] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36826b19-a246-4781-a583-376037193510 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.547942] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438bcfae-20cb-4048-8803-ea549fa6aa8a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.557459] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ca83a2-3d65-434d-9a1f-75020439d167 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.574926] env[65758]: DEBUG nova.compute.provider_tree [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.082656] env[65758]: DEBUG 
nova.scheduler.client.report [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1316.310531] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Completed reading data from the image iterator. {{(pid=65758) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1316.310799] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e28709-4103-40a8-d42c-040a3fe1d64e/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1316.311713] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd29ad9-2a28-4ebc-afcd-6a5ac8a45ca4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.319615] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e28709-4103-40a8-d42c-040a3fe1d64e/disk-0.vmdk is in state: ready. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1316.319847] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e28709-4103-40a8-d42c-040a3fe1d64e/disk-0.vmdk. {{(pid=65758) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1316.320081] env[65758]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-2ed2d024-3918-4625-b6eb-09cc2e96ac08 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.479188] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1316.506513] env[65758]: DEBUG oslo_vmware.rw_handles [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e28709-4103-40a8-d42c-040a3fe1d64e/disk-0.vmdk. 
{{(pid=65758) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1316.506930] env[65758]: INFO nova.virt.vmwareapi.images [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Downloaded image file data c785acf9-ab1e-448c-a793-d1eed56d0b17 [ 1316.507905] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbb8e80-198f-44de-b3e5-1ce98435b0d8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.525458] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c35a45d9-4ca5-46f5-a5d8-5351cacbf21b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.564293] env[65758]: INFO nova.virt.vmwareapi.images [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] The imported VM was unregistered [ 1316.567316] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Caching image {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1316.567614] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Creating directory with path [datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17 {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1316.567965] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8994c898-4244-4e2a-a4a1-3deeaeca7303 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.580117] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Created directory with path [datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17 {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1316.580319] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae/OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae.vmdk to [datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17/c785acf9-ab1e-448c-a793-d1eed56d0b17.vmdk. 
{{(pid=65758) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1316.580588] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-9d748fea-9803-4363-96c5-58269ab74205 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.589368] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1316.591481] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1316.591481] env[65758]: value = "task-4661574" [ 1316.591481] env[65758]: _type = "Task" [ 1316.591481] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.600523] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661574, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.608241] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "a9550f72-009c-4143-afe2-887727e5c071" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.608487] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.633702] env[65758]: INFO nova.scheduler.client.report [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted allocations for instance a125c33a-347c-4522-ac8e-e171fe92757a [ 1317.104589] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661574, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.111708] env[65758]: INFO nova.compute.manager [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Detaching volume d86bceca-2f54-4f80-89ad-662fb3a8104a [ 1317.145519] env[65758]: DEBUG oslo_concurrency.lockutils [None req-d93461a9-e55c-48f7-849d-a41a56ba18da tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "a125c33a-347c-4522-ac8e-e171fe92757a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.222s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.157692] env[65758]: INFO nova.virt.block_device [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Attempting to driver detach volume d86bceca-2f54-4f80-89ad-662fb3a8104a from mountpoint /dev/sdb [ 1317.158041] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Volume detach. Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1317.158314] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910093', 'volume_id': 'd86bceca-2f54-4f80-89ad-662fb3a8104a', 'name': 'volume-d86bceca-2f54-4f80-89ad-662fb3a8104a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9550f72-009c-4143-afe2-887727e5c071', 'attached_at': '', 'detached_at': '', 'volume_id': 'd86bceca-2f54-4f80-89ad-662fb3a8104a', 'serial': 'd86bceca-2f54-4f80-89ad-662fb3a8104a'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1317.159667] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c71c88d-e7ec-4242-979c-786da5df59d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.186688] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eaf36ed-74d9-4bc3-a687-d53784fc4a3e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.196916] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab81ec8d-33cc-4032-a70d-f770cc10d2a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.228466] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd84dc27-8794-4ce5-aad0-e7194b180645 {{(pid=65758) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.245393] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The volume has not been displaced from its original location: [datastore2] volume-d86bceca-2f54-4f80-89ad-662fb3a8104a/volume-d86bceca-2f54-4f80-89ad-662fb3a8104a.vmdk. No consolidation needed. {{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1317.252400] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Reconfiguring VM instance instance-00000070 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1317.252400] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d240dbc-6698-48d6-8bbc-2ebf8b890f5a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.273687] env[65758]: DEBUG oslo_vmware.api [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1317.273687] env[65758]: value = "task-4661575" [ 1317.273687] env[65758]: _type = "Task" [ 1317.273687] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.283709] env[65758]: DEBUG oslo_vmware.api [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661575, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.478995] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1317.479407] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1317.479562] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1317.603252] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661574, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.786645] env[65758]: DEBUG oslo_vmware.api [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661575, 'name': ReconfigVM_Task, 'duration_secs': 0.335822} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.786987] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Reconfigured VM instance instance-00000070 to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1317.791871] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-304ce4c1-a8e5-48cd-be2a-6dd78fee24b8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.810186] env[65758]: DEBUG oslo_vmware.api [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1317.810186] env[65758]: value = "task-4661576" [ 1317.810186] env[65758]: _type = "Task" [ 1317.810186] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.820330] env[65758]: DEBUG oslo_vmware.api [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661576, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.905228] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.905472] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.905871] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "cdc1cfab-4f75-4caf-a4ee-8197af083353-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.905961] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.906192] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.908580] env[65758]: INFO nova.compute.manager [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Terminating instance [ 1318.104482] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661574, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.321459] env[65758]: DEBUG oslo_vmware.api [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661576, 'name': ReconfigVM_Task, 'duration_secs': 0.205695} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.321873] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910093', 'volume_id': 'd86bceca-2f54-4f80-89ad-662fb3a8104a', 'name': 'volume-d86bceca-2f54-4f80-89ad-662fb3a8104a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9550f72-009c-4143-afe2-887727e5c071', 'attached_at': '', 'detached_at': '', 'volume_id': 'd86bceca-2f54-4f80-89ad-662fb3a8104a', 'serial': 'd86bceca-2f54-4f80-89ad-662fb3a8104a'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1318.413343] env[65758]: DEBUG nova.compute.manager [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1318.413566] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1318.414618] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbfd1d6-a009-4dc0-a4c1-de64c926d8b0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.423366] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1318.423647] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d868fc5a-788b-4f1b-a81c-1950c4a756b2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.431885] env[65758]: DEBUG oslo_vmware.api [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1318.431885] env[65758]: value = "task-4661577" [ 1318.431885] env[65758]: _type = "Task" [ 1318.431885] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.443028] env[65758]: DEBUG oslo_vmware.api [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661577, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.606910] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661574, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.881917] env[65758]: DEBUG nova.objects.instance [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'flavor' on Instance uuid a9550f72-009c-4143-afe2-887727e5c071 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.943518] env[65758]: DEBUG oslo_vmware.api [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661577, 'name': PowerOffVM_Task, 'duration_secs': 0.22675} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.943820] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1318.944030] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1318.944291] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee6588f3-f635-49eb-9507-364602527efc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.013700] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1319.013991] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1319.014169] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleting the datastore file [datastore2] cdc1cfab-4f75-4caf-a4ee-8197af083353 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1319.014494] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0ee1a65-b9e7-4d2f-8b3d-9d4d0caafd96 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.022577] env[65758]: DEBUG oslo_vmware.api 
[None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for the task: (returnval){ [ 1319.022577] env[65758]: value = "task-4661579" [ 1319.022577] env[65758]: _type = "Task" [ 1319.022577] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.032191] env[65758]: DEBUG oslo_vmware.api [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.105843] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661574, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.534562] env[65758]: DEBUG oslo_vmware.api [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Task: {'id': task-4661579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427694} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.534952] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1319.535084] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1319.535287] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1319.535545] env[65758]: INFO nova.compute.manager [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1319.535902] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1319.536157] env[65758]: DEBUG nova.compute.manager [-] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1319.536303] env[65758]: DEBUG nova.network.neutron [-] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1319.536609] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1319.537344] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1319.537676] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1319.576672] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1319.608582] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661574, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.774264} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.608873] env[65758]: INFO nova.virt.vmwareapi.ds_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae/OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae.vmdk to [datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17/c785acf9-ab1e-448c-a793-d1eed56d0b17.vmdk. 
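[annotation] The recurring "Waiting for the task: (returnval){...}" and "Task: {'id': task-..., ...} progress is N%" entries above are produced by oslo.vmware's task polling loop (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal sketch of that pattern, with placeholder host, credentials, datacenter reference and datastore paths rather than the values from this deployment:

from oslo_vmware import api

# Placeholder endpoint and credentials; the session in this log targets a real vCenter.
session = api.VMwareAPISession(
    'vcenter.example.test', 'user@vsphere.local', 'secret',
    api_retry_count=10,
    task_poll_interval=0.5)   # roughly matches the ~0.5s progress updates above

# Start a long-running vCenter task (a disk move, as in task-4661574) and block
# until it reaches 'success'; wait_for_task emits the "progress is N%" lines.
disk_manager = session.vim.service_content.virtualDiskManager
dc_ref = None  # placeholder; a real call passes the Datacenter managed object reference
task = session.invoke_api(
    session.vim, 'MoveVirtualDisk_Task', disk_manager,
    sourceName='[datastore2] src-dir/src.vmdk', sourceDatacenter=dc_ref,
    destName='[datastore2] dst-dir/dst.vmdk', destDatacenter=dc_ref,
    force=False)
session.wait_for_task(task)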
[ 1319.609092] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Cleaning up location [datastore2] OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1319.609375] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_e08daf82-895a-41b4-8383-3021655bd4ae {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1319.609665] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00d44ed9-07da-4509-b572-4410426661e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.617386] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1319.617386] env[65758]: value = "task-4661580" [ 1319.617386] env[65758]: _type = "Task" [ 1319.617386] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.626804] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661580, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.837222] env[65758]: DEBUG nova.compute.manager [req-29fb1bdc-47e2-4549-b261-114eda6f8643 req-d0d1cc83-4f9d-44eb-bdad-bc141a5ca5fc service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Received event network-vif-deleted-9084cee5-02d7-477c-8464-d70e0bfd1ef8 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1319.837356] env[65758]: INFO nova.compute.manager [req-29fb1bdc-47e2-4549-b261-114eda6f8643 req-d0d1cc83-4f9d-44eb-bdad-bc141a5ca5fc service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Neutron deleted interface 9084cee5-02d7-477c-8464-d70e0bfd1ef8; detaching it from the instance and deleting it from the info cache [ 1319.837527] env[65758]: DEBUG nova.network.neutron [req-29fb1bdc-47e2-4549-b261-114eda6f8643 req-d0d1cc83-4f9d-44eb-bdad-bc141a5ca5fc service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1319.888798] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b2ba69cb-a906-4afa-90f0-d7c667b97056 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.280s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.127389] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036201} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.127673] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1320.127801] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17/c785acf9-ab1e-448c-a793-d1eed56d0b17.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.128052] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17/c785acf9-ab1e-448c-a793-d1eed56d0b17.vmdk to [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742/e5b042e0-3dba-4bfe-9e4d-1d55bcb72742.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1320.128328] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a54e3aad-1231-4293-84b3-df7465243dcc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.136203] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1320.136203] env[65758]: value = "task-4661581" [ 1320.136203] env[65758]: _type = "Task" [ 1320.136203] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.146815] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661581, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.315165] env[65758]: DEBUG nova.network.neutron [-] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1320.340023] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-596f0dfb-15d9-4216-b630-084f4aecfe78 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.351693] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6e9ee0-0ff4-4f9b-a6de-f03bf1ebbfa9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.381714] env[65758]: DEBUG nova.compute.manager [req-29fb1bdc-47e2-4549-b261-114eda6f8643 req-d0d1cc83-4f9d-44eb-bdad-bc141a5ca5fc service nova] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Detach interface failed, port_id=9084cee5-02d7-477c-8464-d70e0bfd1ef8, reason: Instance cdc1cfab-4f75-4caf-a4ee-8197af083353 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1320.647375] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661581, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.818634] env[65758]: INFO nova.compute.manager [-] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Took 1.28 seconds to deallocate network for instance. 
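[annotation] The paired 'Acquiring lock "<uuid>" by "...do_terminate_instance"' / 'Lock "<uuid>" acquired ... waited 0.001s' entries come from oslo.concurrency's synchronized decorator, which serializes the terminate path per instance UUID. A hedged sketch of that idiom, with illustrative names rather than Nova's actual code:

from oslo_concurrency import lockutils

def terminate_instance(instance_uuid):
    # The inner function is wrapped per-UUID, so two terminations of the same
    # instance cannot interleave; the wrapper logs the acquire/release lines.
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        power_off_and_destroy(instance_uuid)   # placeholder for the real teardown

    do_terminate_instance()

def power_off_and_destroy(instance_uuid):
    print('power off, unregister, delete files, deallocate network for', instance_uuid)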
[ 1320.898668] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "a9550f72-009c-4143-afe2-887727e5c071" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.899174] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.899444] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "a9550f72-009c-4143-afe2-887727e5c071-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.899631] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.899800] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.902592] env[65758]: INFO nova.compute.manager [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Terminating instance [ 1321.147307] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661581, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.326581] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.326919] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.327176] env[65758]: DEBUG nova.objects.instance [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lazy-loading 'resources' on Instance uuid cdc1cfab-4f75-4caf-a4ee-8197af083353 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.407456] env[65758]: DEBUG nova.compute.manager [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1321.407614] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1321.408619] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a882d69-e60a-494d-bf26-97a049de971f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.417843] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1321.418146] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00d7a9bf-cac5-47e3-b1b6-630ef5a9e652 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.425899] env[65758]: DEBUG oslo_vmware.api [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1321.425899] env[65758]: value = "task-4661582" [ 1321.425899] env[65758]: _type = "Task" [ 1321.425899] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.435436] env[65758]: DEBUG oslo_vmware.api [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661582, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.650541] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661581, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.888832] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50d70d5-b953-4784-bc79-515acebc4a13 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.897255] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e7810e-4715-428b-b419-6cdae38a796e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.931843] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4c9791-0564-42a1-aa56-ea39c4c8dc0a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.940658] env[65758]: DEBUG oslo_vmware.api [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661582, 'name': PowerOffVM_Task, 'duration_secs': 0.253116} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.943089] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.943301] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1321.943671] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a081d01d-0b83-4933-9eae-7547027631e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.946404] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e691306-c842-4741-88bf-c563953515ea {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.962401] env[65758]: DEBUG nova.compute.provider_tree [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1322.024853] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1322.025158] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1322.025272] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleting the datastore file [datastore2] a9550f72-009c-4143-afe2-887727e5c071 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1322.025576] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-502c61e1-bc28-448f-8ecc-f4bdd0e97873 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.036044] env[65758]: DEBUG oslo_vmware.api [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1322.036044] env[65758]: value = "task-4661584" [ 1322.036044] env[65758]: _type = "Task" [ 1322.036044] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.050156] env[65758]: DEBUG oslo_vmware.api [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.150616] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661581, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.465949] env[65758]: DEBUG nova.scheduler.client.report [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1322.546441] env[65758]: DEBUG oslo_vmware.api [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.650034] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661581, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.364354} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.650309] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c785acf9-ab1e-448c-a793-d1eed56d0b17/c785acf9-ab1e-448c-a793-d1eed56d0b17.vmdk to [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742/e5b042e0-3dba-4bfe-9e4d-1d55bcb72742.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1322.651124] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbd64d1-b849-4406-8d70-90a033799dc2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.673516] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742/e5b042e0-3dba-4bfe-9e4d-1d55bcb72742.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1322.673845] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e628924-ce33-4ee8-b30c-135053a10300 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.693984] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1322.693984] env[65758]: value = "task-4661585" [ 1322.693984] env[65758]: _type = "Task" [ 1322.693984] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.702523] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661585, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.971363] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.644s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.999729] env[65758]: INFO nova.scheduler.client.report [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Deleted allocations for instance cdc1cfab-4f75-4caf-a4ee-8197af083353 [ 1323.048479] env[65758]: DEBUG oslo_vmware.api [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.566837} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.048640] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1323.049183] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1323.049183] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1323.049277] env[65758]: INFO nova.compute.manager [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: a9550f72-009c-4143-afe2-887727e5c071] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1323.049537] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1323.049743] env[65758]: DEBUG nova.compute.manager [-] [instance: a9550f72-009c-4143-afe2-887727e5c071] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1323.049845] env[65758]: DEBUG nova.network.neutron [-] [instance: a9550f72-009c-4143-afe2-887727e5c071] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1323.050148] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1323.050705] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1323.050972] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1323.100519] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
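[annotation] The "Inventory has not changed for provider ..." report lines carry the resource-provider inventory that Placement uses when scheduling. Effective capacity per resource class follows the usual (total - reserved) * allocation_ratio rule; a small sketch applying it to the values printed above (min_unit/max_unit/step_size omitted):

# Inventory values copied from the report lines above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0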
[ 1323.204766] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661585, 'name': ReconfigVM_Task, 'duration_secs': 0.328935} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.205108] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Reconfigured VM instance instance-00000075 to attach disk [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742/e5b042e0-3dba-4bfe-9e4d-1d55bcb72742.vmdk or device None with type streamOptimized {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1323.205792] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f89731bc-db3a-434f-adaf-5284c6ee72fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.212772] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1323.212772] env[65758]: value = "task-4661586" [ 1323.212772] env[65758]: _type = "Task" [ 1323.212772] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.221784] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661586, 'name': Rename_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.510962] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b303dac5-8cbf-4d27-8a03-ed081b7e5694 tempest-ServersTestJSON-887760377 tempest-ServersTestJSON-887760377-project-member] Lock "cdc1cfab-4f75-4caf-a4ee-8197af083353" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.605s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.534223] env[65758]: DEBUG nova.compute.manager [req-7b654dfe-e408-44ad-8fda-15402ae7ab9c req-ce226479-5ba8-4743-b209-dc14edcc964e service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Received event network-vif-deleted-8bba2462-60e3-4a60-9eac-f9e7a6e5a898 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1323.534390] env[65758]: INFO nova.compute.manager [req-7b654dfe-e408-44ad-8fda-15402ae7ab9c req-ce226479-5ba8-4743-b209-dc14edcc964e service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Neutron deleted interface 8bba2462-60e3-4a60-9eac-f9e7a6e5a898; detaching it from the instance and deleting it from the info cache [ 1323.534562] env[65758]: DEBUG nova.network.neutron [req-7b654dfe-e408-44ad-8fda-15402ae7ab9c req-ce226479-5ba8-4743-b209-dc14edcc964e service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1323.726652] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661586, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.013028] env[65758]: DEBUG nova.network.neutron [-] [instance: a9550f72-009c-4143-afe2-887727e5c071] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1324.038246] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f93458fe-5939-49fd-9332-18445ef68f72 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.049608] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f24c65-3f3f-4893-8ddb-778c63e1a5f1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.078812] env[65758]: DEBUG nova.compute.manager [req-7b654dfe-e408-44ad-8fda-15402ae7ab9c req-ce226479-5ba8-4743-b209-dc14edcc964e service nova] [instance: a9550f72-009c-4143-afe2-887727e5c071] Detach interface failed, port_id=8bba2462-60e3-4a60-9eac-f9e7a6e5a898, reason: Instance a9550f72-009c-4143-afe2-887727e5c071 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1324.225253] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661586, 'name': Rename_Task, 'duration_secs': 0.628686} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.225541] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1324.225952] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83154cbd-b38a-45c4-9ebc-30c1e3532ac4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.234243] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1324.234243] env[65758]: value = "task-4661587" [ 1324.234243] env[65758]: _type = "Task" [ 1324.234243] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.255200] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661587, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.515599] env[65758]: INFO nova.compute.manager [-] [instance: a9550f72-009c-4143-afe2-887727e5c071] Took 1.47 seconds to deallocate network for instance. [ 1324.744865] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661587, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.023214] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1325.023472] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1325.023729] env[65758]: DEBUG nova.objects.instance [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'resources' on Instance uuid a9550f72-009c-4143-afe2-887727e5c071 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1325.245097] env[65758]: DEBUG oslo_vmware.api [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661587, 'name': PowerOnVM_Task, 'duration_secs': 0.606802} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.245486] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1325.360038] env[65758]: DEBUG nova.compute.manager [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1325.360993] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244541c3-ff7f-423a-8864-b9d6cb7389c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.576240] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4caf63ef-2888-4a78-af6f-5d041e6c5ca2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.584262] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b138e6-3167-47e2-9539-92e927282ce4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.617201] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d0912f-10a2-4923-b120-d73159ae117d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.625941] env[65758]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec40851-d278-441a-9656-4d3512b63918 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.640902] env[65758]: DEBUG nova.compute.provider_tree [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.877719] env[65758]: DEBUG oslo_concurrency.lockutils [None req-68ce34f5-47e3-4aed-bfc9-7955bf3e54f4 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.164s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.144966] env[65758]: DEBUG nova.scheduler.client.report [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1326.650377] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.627s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.676612] env[65758]: INFO nova.scheduler.client.report [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleted allocations for instance a9550f72-009c-4143-afe2-887727e5c071 [ 1326.719361] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.719624] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.719830] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.720015] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.720198] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.722414] env[65758]: INFO nova.compute.manager [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Terminating instance [ 1327.185327] env[65758]: DEBUG oslo_concurrency.lockutils [None req-4894deb6-b509-4d19-aa23-ef5971302fe5 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "a9550f72-009c-4143-afe2-887727e5c071" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.286s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.226954] env[65758]: DEBUG nova.compute.manager [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1327.227221] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1327.228189] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d640969-09f1-4147-80e3-39835cbc45e9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.236575] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1327.236870] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d77bd077-0115-45bb-b8cc-9341e8afe43b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.244289] env[65758]: DEBUG oslo_vmware.api [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1327.244289] env[65758]: value = "task-4661588" [ 1327.244289] env[65758]: _type = "Task" [ 1327.244289] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.253294] env[65758]: DEBUG oslo_vmware.api [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661588, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.756470] env[65758]: DEBUG oslo_vmware.api [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661588, 'name': PowerOffVM_Task, 'duration_secs': 0.195276} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.756745] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1327.756921] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1327.757512] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85a7602f-8995-443e-988f-312aa850280d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.825173] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1327.825423] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1327.825588] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleting the datastore file [datastore2] e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1327.825961] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76d67afe-64d5-46bc-8539-0a67a2209a61 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.833521] env[65758]: DEBUG oslo_vmware.api [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for the task: (returnval){ [ 1327.833521] env[65758]: value = "task-4661590" [ 1327.833521] env[65758]: _type = "Task" [ 1327.833521] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.689550] env[65758]: DEBUG oslo_vmware.api [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661590, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.689868] env[65758]: WARNING oslo_vmware.common.loopingcall [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] task run outlasted interval by 0.35599099999999995 sec [ 1328.699037] env[65758]: DEBUG oslo_vmware.api [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Task: {'id': task-4661590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146773} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.699280] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1328.699450] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1328.699614] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1328.699804] env[65758]: INFO nova.compute.manager [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Took 1.47 seconds to destroy the instance on the hypervisor. [ 1328.700046] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1328.700238] env[65758]: DEBUG nova.compute.manager [-] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1328.700331] env[65758]: DEBUG nova.network.neutron [-] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1328.700565] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1328.701233] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1328.701475] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1328.763808] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1329.218153] env[65758]: DEBUG nova.compute.manager [req-60740219-35b6-4a0c-9fd9-77fc2e8055dc req-ecf5bf8d-49ee-4044-9c6d-55fd5f2587d3 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Received event network-vif-deleted-61227a3e-82c2-4ebf-b71b-b953b5667f90 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1329.218286] env[65758]: INFO nova.compute.manager [req-60740219-35b6-4a0c-9fd9-77fc2e8055dc req-ecf5bf8d-49ee-4044-9c6d-55fd5f2587d3 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Neutron deleted interface 61227a3e-82c2-4ebf-b71b-b953b5667f90; detaching it from the instance and deleting it from the info cache [ 1329.218462] env[65758]: DEBUG nova.network.neutron [req-60740219-35b6-4a0c-9fd9-77fc2e8055dc req-ecf5bf8d-49ee-4044-9c6d-55fd5f2587d3 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1329.226072] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "9be7cd63-74b0-475c-9928-12330eb3c54c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.226693] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.696218] env[65758]: DEBUG nova.network.neutron [-] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1329.720968] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40f861e5-1bd6-4080-b2f4-097b838fa116 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.728845] env[65758]: DEBUG nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 
tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Starting instance... {{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1329.734401] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647d2de9-6497-40ff-84da-0af469764992 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.760892] env[65758]: DEBUG nova.compute.manager [req-60740219-35b6-4a0c-9fd9-77fc2e8055dc req-ecf5bf8d-49ee-4044-9c6d-55fd5f2587d3 service nova] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Detach interface failed, port_id=61227a3e-82c2-4ebf-b71b-b953b5667f90, reason: Instance e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1330.199524] env[65758]: INFO nova.compute.manager [-] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Took 1.50 seconds to deallocate network for instance. [ 1330.265306] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.265588] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.267303] env[65758]: INFO nova.compute.claims [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1330.706185] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.318898] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0022b9-3f18-4706-a435-7c1dcbc5ceb5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.327097] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbc6192-3772-43cb-92a8-ada90827a13b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.357605] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba30a1a-0581-4d9f-95b5-c7b80abf17c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.365769] env[65758]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6fa284-7662-4529-95d9-2621fa80db10 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.379795] env[65758]: DEBUG nova.compute.provider_tree [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.883309] env[65758]: DEBUG nova.scheduler.client.report [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1332.388592] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1332.389130] env[65758]: DEBUG nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Start building networks asynchronously for instance. 
{{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1332.392051] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.686s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1332.392669] env[65758]: DEBUG nova.objects.instance [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lazy-loading 'resources' on Instance uuid e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.896551] env[65758]: DEBUG nova.compute.utils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1332.901256] env[65758]: DEBUG nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1332.901315] env[65758]: DEBUG nova.network.neutron [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1332.901587] env[65758]: WARNING neutronclient.v2_0.client [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1332.901922] env[65758]: WARNING neutronclient.v2_0.client [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1332.902478] env[65758]: WARNING openstack [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1332.902825] env[65758]: WARNING openstack [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1332.940841] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128ea7f4-cff2-430f-a66e-b4545cc221f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.950385] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb5b13c-216e-44cf-9c9d-814914c5bdcb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.958201] env[65758]: DEBUG nova.policy [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcb6cf498b804adb971dd7e1722c277b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f32b2100e0824c56ab852e0d1bb37e87', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1332.984273] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98bb037-7cff-4203-b3f0-69b143083309 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.992657] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7f5551-754a-4de6-9be1-63888dc56b23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.006882] env[65758]: DEBUG nova.compute.provider_tree [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.261614] env[65758]: DEBUG nova.network.neutron [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Successfully created port: 123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:579}} [ 1333.402916] env[65758]: DEBUG nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1333.509941] env[65758]: DEBUG nova.scheduler.client.report [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1334.014351] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.622s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.042234] env[65758]: INFO nova.scheduler.client.report [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Deleted allocations for instance e5b042e0-3dba-4bfe-9e4d-1d55bcb72742 [ 1334.413891] env[65758]: DEBUG nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1334.441332] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1334.441603] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1334.441756] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1334.441938] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1334.442090] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1334.442233] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1334.442434] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1334.442584] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1334.442743] env[65758]: DEBUG 
nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1334.442903] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1334.443078] env[65758]: DEBUG nova.virt.hardware [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1334.443979] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3424ef8-6c2a-4855-a8d7-a5f4e6d314fa {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.453891] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f764b22-195b-45ca-99de-f0893a7dc0a4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.550449] env[65758]: DEBUG oslo_concurrency.lockutils [None req-31b99eb0-2bd1-4d5b-b56a-8b1687f35773 tempest-ServerActionsTestOtherB-1402960202 tempest-ServerActionsTestOtherB-1402960202-project-member] Lock "e5b042e0-3dba-4bfe-9e4d-1d55bcb72742" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.831s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.658997] env[65758]: DEBUG nova.compute.manager [req-d472df92-96ee-4bc8-a916-db6f310b15ae req-f1398f2c-2872-4bb3-82ca-cb9836fb201d service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Received event network-vif-plugged-123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1334.659183] env[65758]: DEBUG oslo_concurrency.lockutils [req-d472df92-96ee-4bc8-a916-db6f310b15ae req-f1398f2c-2872-4bb3-82ca-cb9836fb201d service nova] Acquiring lock "9be7cd63-74b0-475c-9928-12330eb3c54c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.659385] env[65758]: DEBUG oslo_concurrency.lockutils [req-d472df92-96ee-4bc8-a916-db6f310b15ae req-f1398f2c-2872-4bb3-82ca-cb9836fb201d service nova] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.659545] env[65758]: DEBUG oslo_concurrency.lockutils [req-d472df92-96ee-4bc8-a916-db6f310b15ae req-f1398f2c-2872-4bb3-82ca-cb9836fb201d service nova] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.659712] env[65758]: DEBUG nova.compute.manager [req-d472df92-96ee-4bc8-a916-db6f310b15ae req-f1398f2c-2872-4bb3-82ca-cb9836fb201d service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] No waiting events found dispatching network-vif-plugged-123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1334.659872] env[65758]: WARNING nova.compute.manager [req-d472df92-96ee-4bc8-a916-db6f310b15ae req-f1398f2c-2872-4bb3-82ca-cb9836fb201d service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Received unexpected event network-vif-plugged-123eca7f-0c97-4859-a1f3-d7d80d91b3cc for instance with vm_state building and task_state spawning. [ 1334.741281] env[65758]: DEBUG nova.network.neutron [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Successfully updated port: 123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1335.244334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.244607] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1335.244687] env[65758]: DEBUG nova.network.neutron [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1335.748041] env[65758]: WARNING openstack [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1335.748564] env[65758]: WARNING openstack [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1335.784236] env[65758]: DEBUG nova.network.neutron [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Instance cache missing network 
info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1335.860183] env[65758]: WARNING neutronclient.v2_0.client [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1335.860852] env[65758]: WARNING openstack [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1335.861228] env[65758]: WARNING openstack [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1335.945303] env[65758]: DEBUG nova.network.neutron [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Updating instance_info_cache with network_info: [{"id": "123eca7f-0c97-4859-a1f3-d7d80d91b3cc", "address": "fa:16:3e:40:b0:5a", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap123eca7f-0c", "ovs_interfaceid": "123eca7f-0c97-4859-a1f3-d7d80d91b3cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1336.448644] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1336.449151] env[65758]: DEBUG nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] 
[instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Instance network_info: |[{"id": "123eca7f-0c97-4859-a1f3-d7d80d91b3cc", "address": "fa:16:3e:40:b0:5a", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap123eca7f-0c", "ovs_interfaceid": "123eca7f-0c97-4859-a1f3-d7d80d91b3cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1336.449509] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:b0:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc16c915-cff1-4faa-a529-9773ee9bab7e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '123eca7f-0c97-4859-a1f3-d7d80d91b3cc', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1336.456874] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1336.457114] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1336.457344] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96b0e891-382f-4dc6-8c59-6c786735f9bb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.477417] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1336.477417] env[65758]: value = "task-4661592" [ 1336.477417] env[65758]: _type = "Task" [ 1336.477417] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.485486] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661592, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.693878] env[65758]: DEBUG nova.compute.manager [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Received event network-changed-123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1336.694105] env[65758]: DEBUG nova.compute.manager [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Refreshing instance network info cache due to event network-changed-123eca7f-0c97-4859-a1f3-d7d80d91b3cc. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1336.694322] env[65758]: DEBUG oslo_concurrency.lockutils [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] Acquiring lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.694813] env[65758]: DEBUG oslo_concurrency.lockutils [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] Acquired lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.694813] env[65758]: DEBUG nova.network.neutron [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Refreshing network info cache for port 123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1336.987336] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661592, 'name': CreateVM_Task, 'duration_secs': 0.299673} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.987527] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1336.987995] env[65758]: WARNING neutronclient.v2_0.client [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1336.988388] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.988535] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.988852] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1336.989123] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-099643e5-7d91-46ed-9ed5-299db3ef3f9f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.994376] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1336.994376] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5259cb08-2bf3-83b1-2de5-36071f24c51c" [ 1336.994376] env[65758]: _type = "Task" [ 1336.994376] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.002606] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5259cb08-2bf3-83b1-2de5-36071f24c51c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.197612] env[65758]: WARNING neutronclient.v2_0.client [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1337.198306] env[65758]: WARNING openstack [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1337.198657] env[65758]: WARNING openstack [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1337.365449] env[65758]: WARNING neutronclient.v2_0.client [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1337.366179] env[65758]: WARNING openstack [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1337.366534] env[65758]: WARNING openstack [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1337.507352] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5259cb08-2bf3-83b1-2de5-36071f24c51c, 'name': SearchDatastore_Task, 'duration_secs': 0.011927} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.507702] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.507799] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1337.508026] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.508167] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1337.508342] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1337.508611] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27cb14f1-2212-448b-83bf-5fd78ab45477 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.518066] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1337.518294] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1337.519039] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e98f1a0a-d101-41c4-8a60-6af5582da77c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.524919] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1337.524919] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc3b44-c879-6123-938e-5050cba9bde2" [ 1337.524919] env[65758]: _type = "Task" [ 1337.524919] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.533184] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc3b44-c879-6123-938e-5050cba9bde2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.854181] env[65758]: DEBUG nova.network.neutron [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Updated VIF entry in instance network info cache for port 123eca7f-0c97-4859-a1f3-d7d80d91b3cc. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1337.854586] env[65758]: DEBUG nova.network.neutron [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Updating instance_info_cache with network_info: [{"id": "123eca7f-0c97-4859-a1f3-d7d80d91b3cc", "address": "fa:16:3e:40:b0:5a", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap123eca7f-0c", "ovs_interfaceid": "123eca7f-0c97-4859-a1f3-d7d80d91b3cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1338.036023] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc3b44-c879-6123-938e-5050cba9bde2, 
'name': SearchDatastore_Task, 'duration_secs': 0.008883} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.036855] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-745a1606-fff7-4616-908f-efba5edcb3d7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.043369] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1338.043369] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc3852-5bc3-7edc-7522-1345dbc2a757" [ 1338.043369] env[65758]: _type = "Task" [ 1338.043369] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.052394] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc3852-5bc3-7edc-7522-1345dbc2a757, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.357109] env[65758]: DEBUG oslo_concurrency.lockutils [req-c0c39ff2-da1c-4c88-b646-4fc965ee871d req-93912ef6-6dbf-4ee4-9f6a-316f72592419 service nova] Releasing lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.554913] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52dc3852-5bc3-7edc-7522-1345dbc2a757, 'name': SearchDatastore_Task, 'duration_secs': 0.009828} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.555204] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.555451] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 9be7cd63-74b0-475c-9928-12330eb3c54c/9be7cd63-74b0-475c-9928-12330eb3c54c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1338.555719] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93f9e2ca-46ae-415c-8b8b-4e8202afbc28 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.563418] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1338.563418] env[65758]: value = "task-4661593" [ 1338.563418] env[65758]: _type = "Task" [ 1338.563418] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.571511] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.073832] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661593, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459677} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.074113] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 9be7cd63-74b0-475c-9928-12330eb3c54c/9be7cd63-74b0-475c-9928-12330eb3c54c.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1339.074324] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1339.074585] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-259c514b-60e3-4d5e-b74b-34333a580efc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.082085] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1339.082085] env[65758]: value = "task-4661594" [ 1339.082085] env[65758]: _type = "Task" [ 1339.082085] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.091425] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661594, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.594052] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661594, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061001} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.594052] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1339.594052] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11eb979-f943-44cf-b41b-507581cef677 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.622015] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 9be7cd63-74b0-475c-9928-12330eb3c54c/9be7cd63-74b0-475c-9928-12330eb3c54c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1339.622015] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96899801-d5ba-4f3c-9592-b557ff58ecd7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.642024] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1339.642024] env[65758]: value = "task-4661595" [ 1339.642024] env[65758]: _type = "Task" [ 1339.642024] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.651197] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661595, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.152187] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661595, 'name': ReconfigVM_Task, 'duration_secs': 0.273709} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.152474] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 9be7cd63-74b0-475c-9928-12330eb3c54c/9be7cd63-74b0-475c-9928-12330eb3c54c.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1340.153105] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66772de6-13d5-4d77-9c67-55b89a74debc {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.161610] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1340.161610] env[65758]: value = "task-4661596" [ 1340.161610] env[65758]: _type = "Task" [ 1340.161610] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.172214] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661596, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.672838] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661596, 'name': Rename_Task, 'duration_secs': 0.143453} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.673362] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1340.673452] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2208ae86-fd5a-48e7-8ab2-16a68f9e1aba {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.682525] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1340.682525] env[65758]: value = "task-4661597" [ 1340.682525] env[65758]: _type = "Task" [ 1340.682525] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.692639] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661597, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.193686] env[65758]: DEBUG oslo_vmware.api [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661597, 'name': PowerOnVM_Task, 'duration_secs': 0.446683} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.194028] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1341.194272] env[65758]: INFO nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Took 6.78 seconds to spawn the instance on the hypervisor. [ 1341.194480] env[65758]: DEBUG nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1341.195285] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93d2707-9a19-4f97-b236-728c20c63e85 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.366152] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "6b11147a-3901-4314-8c9e-0868debce49a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.366378] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "6b11147a-3901-4314-8c9e-0868debce49a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.712790] env[65758]: INFO nova.compute.manager [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Took 11.47 seconds to build instance. [ 1341.868415] env[65758]: DEBUG nova.compute.manager [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1342.215342] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b5f2c891-16b5-4548-98bb-0177206dcfa6 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.989s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1342.392015] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.392334] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.393840] env[65758]: INFO nova.compute.claims [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1342.420606] env[65758]: DEBUG nova.compute.manager [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Received event network-changed-123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1342.421136] env[65758]: DEBUG nova.compute.manager [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Refreshing instance network info cache due to event network-changed-123eca7f-0c97-4859-a1f3-d7d80d91b3cc. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1342.421330] env[65758]: DEBUG oslo_concurrency.lockutils [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] Acquiring lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.422038] env[65758]: DEBUG oslo_concurrency.lockutils [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] Acquired lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.422038] env[65758]: DEBUG nova.network.neutron [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Refreshing network info cache for port 123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1342.925246] env[65758]: WARNING neutronclient.v2_0.client [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1342.925981] env[65758]: WARNING openstack [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1342.926472] env[65758]: WARNING openstack [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1343.093085] env[65758]: WARNING neutronclient.v2_0.client [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
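The paired "Acquiring lock" / "Acquired lock" / "Releasing lock" records in this stretch (the per-instance "refresh_cache-<uuid>" lock, the "compute_resources" lock, the image-cache datastore locks) are emitted by oslo.concurrency's lockutils helpers while they hold a named lock. A minimal sketch of that pattern, with a hypothetical critical section (the lock name is taken from the records above; everything else is illustrative, not code from this deployment):

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "9be7cd63-74b0-475c-9928-12330eb3c54c"

    def refresh_network_info_cache(instance_uuid):
        # Entering and leaving this context manager produces the
        # "Acquiring lock ..." / "Acquired lock ..." / "Releasing lock ..."
        # DEBUG lines seen in this log.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            # placeholder for the actual network-info cache refresh
            pass

Nova wraps the same helpers (directly, or via the @lockutils.synchronized decorator for the build/claim paths) around cache refreshes and resource claims, which is why each build in this log is bracketed by a matching acquire/release pair.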
[ 1343.093771] env[65758]: WARNING openstack [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1343.094133] env[65758]: WARNING openstack [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1343.174557] env[65758]: DEBUG nova.network.neutron [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Updated VIF entry in instance network info cache for port 123eca7f-0c97-4859-a1f3-d7d80d91b3cc. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1343.174984] env[65758]: DEBUG nova.network.neutron [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Updating instance_info_cache with network_info: [{"id": "123eca7f-0c97-4859-a1f3-d7d80d91b3cc", "address": "fa:16:3e:40:b0:5a", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap123eca7f-0c", "ovs_interfaceid": "123eca7f-0c97-4859-a1f3-d7d80d91b3cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1343.444029] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbbff8c-be7a-4177-a093-7b508bb9dfcb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.452320] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665b40eb-f66d-49a3-b39b-18f368d346f4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.494383] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc83fe1-766f-4aaf-be39-e93c6d154ec8 {{(pid=65758) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.502209] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b74d74b-76b2-465d-899e-a0862ff60b3d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.515460] env[65758]: DEBUG nova.compute.provider_tree [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1343.678836] env[65758]: DEBUG oslo_concurrency.lockutils [req-8473ffbc-1a5a-4b84-80d9-438f00b64281 req-d9f26436-97d9-4d16-9073-7452e575a176 service nova] Releasing lock "refresh_cache-9be7cd63-74b0-475c-9928-12330eb3c54c" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1344.018505] env[65758]: DEBUG nova.scheduler.client.report [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1344.523267] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1344.523907] env[65758]: DEBUG nova.compute.manager [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1345.028728] env[65758]: DEBUG nova.compute.utils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1345.030238] env[65758]: DEBUG nova.compute.manager [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Not allocating networking since 'none' was specified. 
{{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2016}} [ 1345.531775] env[65758]: DEBUG nova.compute.manager [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1346.542062] env[65758]: DEBUG nova.compute.manager [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Start spawning the instance on the hypervisor. {{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1346.566679] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1346.566953] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1346.567140] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1346.567334] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1346.567479] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1346.567625] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1346.567835] env[65758]: 
DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1346.567992] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1346.568178] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1346.568337] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1346.568506] env[65758]: DEBUG nova.virt.hardware [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1346.569396] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfa154a-0343-4f8f-8f03-cccf173066d9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.579259] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10556d8-8643-46a1-b292-71ce4b1ec422 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.593008] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1346.598436] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Creating folder: Project (2d0545a288ad484582095a99f604bce5). Parent ref: group-v909763. 
{{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1346.598694] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1321847-886f-4f41-955f-ee524ee53f29 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.610385] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Created folder: Project (2d0545a288ad484582095a99f604bce5) in parent group-v909763. [ 1346.610559] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Creating folder: Instances. Parent ref: group-v910104. {{(pid=65758) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1346.610783] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bc6ca17-74a6-476b-b218-cfd9e11891c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.620136] env[65758]: INFO nova.virt.vmwareapi.vm_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Created folder: Instances in parent group-v910104. [ 1346.620369] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1346.620562] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1346.620764] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f22b3f7-e08a-49d3-ab83-642f201bc66f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.636323] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1346.636323] env[65758]: value = "task-4661600" [ 1346.636323] env[65758]: _type = "Task" [ 1346.636323] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.643616] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661600, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.148076] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661600, 'name': CreateVM_Task, 'duration_secs': 0.252097} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.148246] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1347.148661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.148821] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1347.149162] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1347.149425] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3861291-25b1-4093-9e0e-c8e9f407ff5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.155261] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1347.155261] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]528c32a2-15db-314d-7eec-1d823665b304" [ 1347.155261] env[65758]: _type = "Task" [ 1347.155261] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.163924] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528c32a2-15db-314d-7eec-1d823665b304, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.665855] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]528c32a2-15db-314d-7eec-1d823665b304, 'name': SearchDatastore_Task, 'duration_secs': 0.009952} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.666335] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1347.666406] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1347.666650] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.666784] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1347.666961] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1347.667251] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ab52fef-44e7-422f-a009-959699f897ed {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.676558] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1347.676734] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1347.677483] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3cd19de-8482-4c8c-9550-68bd52d973bd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.683049] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1347.683049] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]5263e019-f9d4-1ca7-a861-2b93a92d52e0" [ 1347.683049] env[65758]: _type = "Task" [ 1347.683049] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.690960] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5263e019-f9d4-1ca7-a861-2b93a92d52e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.193627] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]5263e019-f9d4-1ca7-a861-2b93a92d52e0, 'name': SearchDatastore_Task, 'duration_secs': 0.008579} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.194453] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5971d66-356f-4d7c-b859-422ddaf7734a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.199957] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1348.199957] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527e2b30-a08f-d082-e9b0-f573040ced79" [ 1348.199957] env[65758]: _type = "Task" [ 1348.199957] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.207886] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527e2b30-a08f-d082-e9b0-f573040ced79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.710802] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527e2b30-a08f-d082-e9b0-f573040ced79, 'name': SearchDatastore_Task, 'duration_secs': 0.009344} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.711231] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1348.711367] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a/6b11147a-3901-4314-8c9e-0868debce49a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1348.711635] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3abfbaa-e8a6-4a17-b295-0127c01a2b1e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.718768] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1348.718768] env[65758]: value = "task-4661601" [ 1348.718768] env[65758]: _type = "Task" [ 1348.718768] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.726821] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.229395] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475532} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.229610] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a/6b11147a-3901-4314-8c9e-0868debce49a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1349.229690] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1349.229966] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-efb7568d-d744-4953-ab39-8594a6eadec7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.237258] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1349.237258] env[65758]: value = "task-4661602" [ 1349.237258] env[65758]: _type = "Task" [ 1349.237258] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.246691] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.747742] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063116} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.748024] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1349.748810] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa5c150-18d3-403f-951b-2405ac1f43c1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.769212] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a/6b11147a-3901-4314-8c9e-0868debce49a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1349.769470] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-230aadfb-5227-4e6d-90d7-50c39452a1c6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.789073] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1349.789073] env[65758]: value = "task-4661603" [ 1349.789073] env[65758]: _type = "Task" [ 1349.789073] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.797235] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661603, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.299497] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661603, 'name': ReconfigVM_Task, 'duration_secs': 0.263907} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.299769] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a/6b11147a-3901-4314-8c9e-0868debce49a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1350.300504] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f6aa633-8b1f-4d65-9d3e-f849e654b645 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.308562] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1350.308562] env[65758]: value = "task-4661604" [ 1350.308562] env[65758]: _type = "Task" [ 1350.308562] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.318703] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661604, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.819318] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661604, 'name': Rename_Task, 'duration_secs': 0.130219} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.819684] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1350.819813] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5b8174b-31aa-4fbe-a883-2ab75ba6aa3a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.827459] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1350.827459] env[65758]: value = "task-4661605" [ 1350.827459] env[65758]: _type = "Task" [ 1350.827459] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.835610] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661605, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.337952] env[65758]: DEBUG oslo_vmware.api [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661605, 'name': PowerOnVM_Task, 'duration_secs': 0.438225} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.338343] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1351.338570] env[65758]: INFO nova.compute.manager [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Took 4.80 seconds to spawn the instance on the hypervisor. [ 1351.338767] env[65758]: DEBUG nova.compute.manager [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1351.339555] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97de85a-ed3c-432a-8f1f-e684c8597ac4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.860703] env[65758]: INFO nova.compute.manager [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Took 9.49 seconds to build instance. 
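Every vSphere operation in the two builds above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same oslo.vmware pattern: invoke the *_Task method, then block in wait_for_task(), whose polling loop emits the repeated "progress is N%" and "completed successfully" records. A rough sketch of that pattern, assuming placeholder vCenter credentials, datastore paths, and managed-object references (none of these values come from this deployment):

    from oslo_vmware import api

    # Hypothetical session; nova's VMwareVCDriver builds the real one from
    # its configured vCenter host and credentials.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager

    # Invoke the task (this is what the "Invoking
    # VirtualDiskManager.ExtendVirtualDisk_Task" records correspond to);
    # dc_ref stands in for a Datacenter managed-object reference.
    dc_ref = None
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name='[datastore2] example/example.vmdk',
        datacenter=dc_ref, newCapacityKb=1048576, eagerZero=False)

    # Poll the task to completion; this loop is what logs the
    # "Task: {...} progress is N%" and "completed successfully" lines.
    session.wait_for_task(task)

The 1048576 KB figure matches the "Extending root virtual disk to 1048576" records above: the m1.nano flavor's 1 GB root disk expressed in kibibytes.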
[ 1352.362627] env[65758]: INFO nova.compute.manager [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Rebuilding instance [ 1352.365012] env[65758]: DEBUG oslo_concurrency.lockutils [None req-fdaa7159-eef6-41f0-b9e7-a56cea37c434 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "6b11147a-3901-4314-8c9e-0868debce49a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.999s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.409910] env[65758]: DEBUG nova.compute.manager [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1352.410773] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5214c0b6-52d8-47a4-9273-411f6f98c1c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.424584] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1353.424933] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77651efa-6a8a-4aa1-a15f-5d09466223f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.436374] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1353.436374] env[65758]: value = "task-4661606" [ 1353.436374] env[65758]: _type = "Task" [ 1353.436374] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.445929] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661606, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.947224] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661606, 'name': PowerOffVM_Task, 'duration_secs': 0.131757} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.947491] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1353.948202] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1353.948970] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30d9f61-911d-4781-99cd-98199404bf83 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.956326] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1353.956557] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55ed34ce-6d24-4f7d-8f39-2c7fee5fdd47 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.984533] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1353.984748] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1353.984904] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Deleting the datastore file [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1353.985335] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdcf53d1-6277-4470-ae39-dc10fc63a9e3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.992703] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1353.992703] env[65758]: value = "task-4661608" [ 1353.992703] env[65758]: _type = "Task" [ 1353.992703] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.001038] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661608, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.503345] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661608, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094359} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.503728] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1354.503787] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1354.503950] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1355.534920] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1355.535235] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1355.535337] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Image limits 0:0:0 
{{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1355.535514] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1355.535652] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1355.535791] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1355.536046] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1355.536218] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1355.536381] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1355.536536] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1355.536698] env[65758]: DEBUG nova.virt.hardware [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1355.537582] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abcdb6d-b576-4616-9ca3-01ea7663f9d2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.546251] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5997e8d-06c2-4b98-bbf7-ce28b3920e13 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.560013] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 
tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Instance VIF info [] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1355.565514] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1355.565744] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1355.565992] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b4d2480-be72-4ee2-a517-876567a6e1f2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.583593] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1355.583593] env[65758]: value = "task-4661609" [ 1355.583593] env[65758]: _type = "Task" [ 1355.583593] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.591259] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661609, 'name': CreateVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.094020] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661609, 'name': CreateVM_Task, 'duration_secs': 0.267768} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.094227] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1356.094623] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.094779] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1356.095123] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1356.095391] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-686caf94-5bd5-4e3a-ac33-f7c5489bd21b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.101160] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1356.101160] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a562a0-c262-2c1e-7cd4-50c09e15e9e6" [ 1356.101160] env[65758]: _type = "Task" [ 1356.101160] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.109123] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a562a0-c262-2c1e-7cd4-50c09e15e9e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.616241] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a562a0-c262-2c1e-7cd4-50c09e15e9e6, 'name': SearchDatastore_Task, 'duration_secs': 0.011305} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.616625] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.616920] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1356.617216] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.617414] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1356.617675] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1356.618043] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad345afd-4aff-4e0b-8247-105ccd7d3889 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.628188] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1356.628383] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1356.629109] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35526ef3-2761-4351-a630-63d14b408c9b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.634773] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1356.634773] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]525014f9-495e-8d1d-39c8-f56be0451d91" [ 1356.634773] env[65758]: _type = "Task" [ 1356.634773] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.642732] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525014f9-495e-8d1d-39c8-f56be0451d91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.145881] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]525014f9-495e-8d1d-39c8-f56be0451d91, 'name': SearchDatastore_Task, 'duration_secs': 0.00953} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.146700] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-013dba6d-6e98-4a02-9dac-364536314b0f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.152688] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1357.152688] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52a70a37-017b-7353-af34-98f8de9d7a01" [ 1357.152688] env[65758]: _type = "Task" [ 1357.152688] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.160800] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a70a37-017b-7353-af34-98f8de9d7a01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.663134] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52a70a37-017b-7353-af34-98f8de9d7a01, 'name': SearchDatastore_Task, 'duration_secs': 0.009791} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.663558] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1357.663649] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a/6b11147a-3901-4314-8c9e-0868debce49a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1357.663913] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79b70cb4-69ec-4c97-a02d-65b674f15187 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.671189] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1357.671189] env[65758]: value = "task-4661610" [ 1357.671189] env[65758]: _type = "Task" [ 1357.671189] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.679646] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.182479] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661610, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49088} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.182732] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a/6b11147a-3901-4314-8c9e-0868debce49a.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1358.182928] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1358.183225] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37f6d80c-aacc-455c-bb94-188a6d77c42b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.189777] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1358.189777] env[65758]: value = "task-4661611" [ 1358.189777] env[65758]: _type = "Task" [ 1358.189777] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.198143] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661611, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.700273] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661611, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07499} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.700639] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1358.701320] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e660eb-5689-473d-a08b-15d494935faf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.721212] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a/6b11147a-3901-4314-8c9e-0868debce49a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1358.721474] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adc5ce98-3775-40f0-b84e-8e3064bba6fb {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.740775] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1358.740775] env[65758]: value = "task-4661612" [ 1358.740775] env[65758]: _type = "Task" [ 1358.740775] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.748651] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661612, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.251554] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661612, 'name': ReconfigVM_Task, 'duration_secs': 0.265641} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.251817] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a/6b11147a-3901-4314-8c9e-0868debce49a.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1359.252465] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c50f70ac-7a22-4a76-bb3d-6b7d0208b3c0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.259023] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1359.259023] env[65758]: value = "task-4661613" [ 1359.259023] env[65758]: _type = "Task" [ 1359.259023] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.267642] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661613, 'name': Rename_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.769368] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661613, 'name': Rename_Task} progress is 14%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.269976] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661613, 'name': Rename_Task, 'duration_secs': 0.814282} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.270276] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1360.270526] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07f0a796-032c-4ca4-80fc-f03fc66c79c9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.277266] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1360.277266] env[65758]: value = "task-4661614" [ 1360.277266] env[65758]: _type = "Task" [ 1360.277266] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.285790] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661614, 'name': PowerOnVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.787419] env[65758]: DEBUG oslo_vmware.api [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661614, 'name': PowerOnVM_Task, 'duration_secs': 0.405998} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.787820] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1360.787893] env[65758]: DEBUG nova.compute.manager [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1360.788743] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307be9e4-2606-4831-a780-5a7b70fec14c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.304805] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.305534] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.305534] env[65758]: DEBUG nova.objects.instance [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=65758) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1362.129817] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "6b11147a-3901-4314-8c9e-0868debce49a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.130265] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "6b11147a-3901-4314-8c9e-0868debce49a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.130265] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "6b11147a-3901-4314-8c9e-0868debce49a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.130445] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "6b11147a-3901-4314-8c9e-0868debce49a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.130611] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "6b11147a-3901-4314-8c9e-0868debce49a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.132759] env[65758]: INFO nova.compute.manager [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Terminating instance [ 1362.314162] env[65758]: DEBUG oslo_concurrency.lockutils [None req-1acf7686-ffbe-4851-876b-43f5378c6152 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.636545] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "refresh_cache-6b11147a-3901-4314-8c9e-0868debce49a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.636802] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquired lock "refresh_cache-6b11147a-3901-4314-8c9e-0868debce49a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1362.636868] env[65758]: DEBUG nova.network.neutron [None req-560160a1-feb3-442b-91c7-353bfbae9c71 
tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1363.140070] env[65758]: WARNING neutronclient.v2_0.client [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1363.140806] env[65758]: WARNING openstack [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1363.141218] env[65758]: WARNING openstack [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1363.166194] env[65758]: DEBUG nova.network.neutron [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1363.233677] env[65758]: DEBUG nova.network.neutron [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1363.736792] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Releasing lock "refresh_cache-6b11147a-3901-4314-8c9e-0868debce49a" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1363.737258] env[65758]: DEBUG nova.compute.manager [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Start destroying the instance on the hypervisor. 
{{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1363.737461] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1363.738393] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305db7c6-1bb5-475d-9302-b23739d0dbe2 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.746928] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1363.747184] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-897186d4-d667-4c58-a6db-da0ebf901228 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.754157] env[65758]: DEBUG oslo_vmware.api [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1363.754157] env[65758]: value = "task-4661615" [ 1363.754157] env[65758]: _type = "Task" [ 1363.754157] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.762853] env[65758]: DEBUG oslo_vmware.api [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.264667] env[65758]: DEBUG oslo_vmware.api [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661615, 'name': PowerOffVM_Task, 'duration_secs': 0.197688} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.265094] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1364.265094] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1364.265375] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5282258e-b00f-4af1-af2e-aea62809b006 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.290422] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1364.290673] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1364.290795] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Deleting the datastore file [datastore2] 6b11147a-3901-4314-8c9e-0868debce49a {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1364.291116] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be7e22b9-502a-47f2-9907-0986a4f981a0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.299159] env[65758]: DEBUG oslo_vmware.api [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for the task: (returnval){ [ 1364.299159] env[65758]: value = "task-4661617" [ 1364.299159] env[65758]: _type = "Task" [ 1364.299159] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.307364] env[65758]: DEBUG oslo_vmware.api [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661617, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.809814] env[65758]: DEBUG oslo_vmware.api [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Task: {'id': task-4661617, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091546} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.810070] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1364.810246] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1364.810413] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1364.810578] env[65758]: INFO nova.compute.manager [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1364.810819] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1364.811018] env[65758]: DEBUG nova.compute.manager [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1364.811119] env[65758]: DEBUG nova.network.neutron [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1364.811368] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
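[editor's note] The terminate path above repeats the same sequence seen during the rebuild: PowerOffVM_Task, a synchronous UnregisterVM, then DeleteDatastoreFile_Task on the instance directory, followed by network deallocation. A rough sketch of those three vSphere calls via oslo.vmware is below; vm_ref, ds_path and dc_ref stand in for the managed-object references Nova resolves beforehand, and the keyword-argument form of the delete call is an assumption modeled on this log, not authoritative driver code.

# Sketch of the destroy sequence logged above (power off, unregister,
# delete the instance directory). All refs are placeholders.
def destroy_backing(session, vm_ref, ds_path, dc_ref):
    # PowerOffVM_Task and DeleteDatastoreFile_Task are asynchronous;
    # UnregisterVM returns immediately.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)
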
[ 1364.811862] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1364.813068] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1364.833177] env[65758]: DEBUG nova.network.neutron [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1364.833414] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1365.336464] env[65758]: DEBUG nova.network.neutron [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1365.839857] env[65758]: INFO nova.compute.manager [-] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Took 1.03 seconds to deallocate network for instance. [ 1366.347168] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.347544] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.347649] env[65758]: DEBUG nova.objects.instance [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lazy-loading 'resources' on Instance uuid 6b11147a-3901-4314-8c9e-0868debce49a {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1366.896915] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3833a6f8-a989-45aa-95eb-e5ba7fa1e726 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.905061] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2530be-df9e-4789-8c0e-35efc1a87c84 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.934662] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8afd67b-ead7-41ec-a492-e1a4c59e26e7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.942469] env[65758]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4410b16-69a4-4304-aa25-98b759f4cd88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.956971] env[65758]: DEBUG nova.compute.provider_tree [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.461061] env[65758]: DEBUG nova.scheduler.client.report [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1367.965020] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.617s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1367.990122] env[65758]: INFO nova.scheduler.client.report [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Deleted allocations for instance 6b11147a-3901-4314-8c9e-0868debce49a [ 1368.497867] env[65758]: DEBUG oslo_concurrency.lockutils [None req-560160a1-feb3-442b-91c7-353bfbae9c71 tempest-ServersListShow296Test-1905955823 tempest-ServersListShow296Test-1905955823-project-member] Lock "6b11147a-3901-4314-8c9e-0868debce49a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.368s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.478481] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1372.982508] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.982812] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.983040] 
env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.983233] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1372.984339] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65b4db1-1b9c-41a4-9bf4-2a5f855a17ce {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.994868] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3dddb5-7148-46eb-aed2-08cec7fdf3e0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.012569] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f05ed2-9af8-4612-81a1-070d4d77945f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.021041] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d3ebfa-3608-4f17-8c13-723fc1b0da54 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.054831] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179442MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1373.055059] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1373.055228] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.080476] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 9be7cd63-74b0-475c-9928-12330eb3c54c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1374.080775] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1374.080823] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=100GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '1', 'num_vm_active': '1', 'num_task_None': '1', 'num_os_type_None': '1', 'num_proj_f32b2100e0824c56ab852e0d1bb37e87': '1', 'io_workload': '0'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1374.107206] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198e1e01-a9aa-422d-9b4b-ea46b60ac4f9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.115042] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8dc6f2-5ee5-4a4a-98cf-edfb15fa9475 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.144087] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf6dff8-d02f-4e3d-a8f9-6d41f576072e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.151707] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac64bb9-89ac-42c6-8e85-44f91ce68408 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.164771] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1374.668407] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1375.173165] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1375.173590] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: 
held 2.118s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1377.174267] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.174658] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.174658] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.174741] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.478840] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.479094] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1378.479100] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1378.479507] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1379.414681] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "9be7cd63-74b0-475c-9928-12330eb3c54c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1379.414914] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1379.918276] env[65758]: DEBUG nova.compute.utils [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1380.421909] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1381.478543] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "9be7cd63-74b0-475c-9928-12330eb3c54c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1381.478906] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1381.479062] env[65758]: INFO nova.compute.manager [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Attaching volume cbf98e9b-6ce2-4e85-a174-0a7b0da8257b to /dev/sdb [ 1381.509298] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87909da4-24b9-48b4-ab0d-8a1052ab2f59 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.517261] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b257161-407a-4995-a445-ef158f5d4950 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.530617] 
env[65758]: DEBUG nova.virt.block_device [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Updating existing volume attachment record: dc8ce495-f009-4459-a452-6430a43f1f33 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1385.473276] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.073815] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Volume attach. Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1386.074085] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910108', 'volume_id': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'name': 'volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9be7cd63-74b0-475c-9928-12330eb3c54c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'serial': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1386.075009] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a757dfc6-0a87-4b54-bcd5-8c0d72b2bdc0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.092144] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5810a9f0-ccbf-4656-80f7-0f74bcd5d7be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.117512] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b/volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1386.117783] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e34f6bc-bd54-4eaf-a2d8-ffcbab325052 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.135356] env[65758]: DEBUG oslo_vmware.api [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 
1386.135356] env[65758]: value = "task-4661620" [ 1386.135356] env[65758]: _type = "Task" [ 1386.135356] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.143506] env[65758]: DEBUG oslo_vmware.api [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661620, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.647843] env[65758]: DEBUG oslo_vmware.api [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661620, 'name': ReconfigVM_Task, 'duration_secs': 0.340229} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.648241] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Reconfigured VM instance instance-0000007c to attach disk [datastore2] volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b/volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1386.656278] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a725f7d2-894f-48e1-88e9-7688d8b0dc26 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.679177] env[65758]: DEBUG oslo_vmware.api [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1386.679177] env[65758]: value = "task-4661621" [ 1386.679177] env[65758]: _type = "Task" [ 1386.679177] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.687994] env[65758]: DEBUG oslo_vmware.api [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661621, 'name': ReconfigVM_Task} progress is 6%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.189716] env[65758]: DEBUG oslo_vmware.api [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661621, 'name': ReconfigVM_Task, 'duration_secs': 0.13409} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.190065] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910108', 'volume_id': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'name': 'volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9be7cd63-74b0-475c-9928-12330eb3c54c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'serial': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1388.224824] env[65758]: DEBUG nova.objects.instance [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'flavor' on Instance uuid 9be7cd63-74b0-475c-9928-12330eb3c54c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1388.731270] env[65758]: DEBUG oslo_concurrency.lockutils [None req-b04b44c8-7491-4c59-ab0a-9f0933eaf5e0 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.252s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.937065] env[65758]: DEBUG oslo_concurrency.lockutils [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "9be7cd63-74b0-475c-9928-12330eb3c54c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1388.937065] env[65758]: DEBUG oslo_concurrency.lockutils [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.440156] env[65758]: INFO nova.compute.manager [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Detaching volume cbf98e9b-6ce2-4e85-a174-0a7b0da8257b [ 1389.472777] env[65758]: INFO nova.virt.block_device [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Attempting to driver detach volume cbf98e9b-6ce2-4e85-a174-0a7b0da8257b from mountpoint /dev/sdb [ 1389.472852] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 
tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Volume detach. Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1389.472975] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910108', 'volume_id': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'name': 'volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9be7cd63-74b0-475c-9928-12330eb3c54c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'serial': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1389.474161] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe69e73-a8fd-40cf-9253-9ce43b10f3bf {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.497879] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce227d0d-d2e4-4168-96c0-b105ae9ba23b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.505554] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2c7df4-fe05-4885-a02d-812b1db40f23 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.526670] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d85aa9d-c765-492f-974f-95cb53e034f7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.542796] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The volume has not been displaced from its original location: [datastore2] volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b/volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b.vmdk. No consolidation needed. 
{{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1389.548320] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1389.548668] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c635973-64b4-421e-845d-10591475e877 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.567645] env[65758]: DEBUG oslo_vmware.api [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1389.567645] env[65758]: value = "task-4661622" [ 1389.567645] env[65758]: _type = "Task" [ 1389.567645] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.576191] env[65758]: DEBUG oslo_vmware.api [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661622, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.078398] env[65758]: DEBUG oslo_vmware.api [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661622, 'name': ReconfigVM_Task, 'duration_secs': 0.214452} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.078688] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1390.083463] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e788ce63-fac4-4e50-9a53-7751bd247874 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.101155] env[65758]: DEBUG oslo_vmware.api [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1390.101155] env[65758]: value = "task-4661623" [ 1390.101155] env[65758]: _type = "Task" [ 1390.101155] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.110150] env[65758]: DEBUG oslo_vmware.api [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661623, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.611421] env[65758]: DEBUG oslo_vmware.api [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661623, 'name': ReconfigVM_Task, 'duration_secs': 0.168211} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.611832] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910108', 'volume_id': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'name': 'volume-cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9be7cd63-74b0-475c-9928-12330eb3c54c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b', 'serial': 'cbf98e9b-6ce2-4e85-a174-0a7b0da8257b'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1391.153336] env[65758]: DEBUG nova.objects.instance [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'flavor' on Instance uuid 9be7cd63-74b0-475c-9928-12330eb3c54c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1392.160685] env[65758]: DEBUG oslo_concurrency.lockutils [None req-762231f0-64be-49af-8d17-a70f16bbd35a tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.224s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.174500] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "9be7cd63-74b0-475c-9928-12330eb3c54c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.174961] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.174961] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "9be7cd63-74b0-475c-9928-12330eb3c54c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.175138] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.175279] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.177631] env[65758]: INFO nova.compute.manager [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Terminating instance [ 1393.681935] env[65758]: DEBUG nova.compute.manager [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1393.682227] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1393.683113] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b25b6de-ab01-487d-b07c-bf9ff65fd2f0 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.691628] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1393.691887] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c312d133-e0dd-4b30-b664-ae6de5420d24 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.699261] env[65758]: DEBUG oslo_vmware.api [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1393.699261] env[65758]: value = "task-4661624" [ 1393.699261] env[65758]: _type = "Task" [ 1393.699261] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.708751] env[65758]: DEBUG oslo_vmware.api [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661624, 'name': PowerOffVM_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.208857] env[65758]: DEBUG oslo_vmware.api [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661624, 'name': PowerOffVM_Task, 'duration_secs': 0.192428} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.209323] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1394.209323] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1394.209519] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03da0f88-7bfc-4b33-8486-07b2ce16b513 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.276223] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1394.276474] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1394.276676] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleting the datastore file [datastore2] 9be7cd63-74b0-475c-9928-12330eb3c54c {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1394.276950] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa8bfa6f-5dd0-44ff-bb39-09ea3682a574 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.285200] env[65758]: DEBUG oslo_vmware.api [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1394.285200] env[65758]: value = "task-4661626" [ 1394.285200] 
env[65758]: _type = "Task" [ 1394.285200] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.293876] env[65758]: DEBUG oslo_vmware.api [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661626, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.797310] env[65758]: DEBUG oslo_vmware.api [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145153} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.797570] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.797748] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.798015] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.798218] env[65758]: INFO nova.compute.manager [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1394.798522] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1394.798694] env[65758]: DEBUG nova.compute.manager [-] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1394.798790] env[65758]: DEBUG nova.network.neutron [-] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1394.799037] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
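Aside: the task-handling entries above (the "Waiting for the task: (returnval){ ... }" blocks, the "progress is N%" polls and the "completed successfully" confirmations) come from oslo.vmware's session API. A rough sketch of that pattern, with placeholder vCenter credentials and a placeholder managed-object id, neither of which comes from this log:

from oslo_vmware import api, vim_util

# Placeholder connection details; Nova reads the real ones from the
# [vmware] section of nova.conf.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Placeholder managed-object reference for a VM; real code obtains this
# from a prior PropertyCollector/SearchIndex lookup.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start an asynchronous vSphere task and block until it finishes; the
# polling inside wait_for_task() is what produces the "progress is N%"
# and "completed successfully" log entries.
task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task_ref)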
[ 1394.799559] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1394.799819] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1394.836585] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1395.267943] env[65758]: DEBUG nova.compute.manager [req-126c3a35-66ee-4570-87ec-edc6389e5542 req-97288da5-6640-4317-a362-45831f9d6597 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Received event network-vif-deleted-123eca7f-0c97-4859-a1f3-d7d80d91b3cc {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1395.268215] env[65758]: INFO nova.compute.manager [req-126c3a35-66ee-4570-87ec-edc6389e5542 req-97288da5-6640-4317-a362-45831f9d6597 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Neutron deleted interface 123eca7f-0c97-4859-a1f3-d7d80d91b3cc; detaching it from the instance and deleting it from the info cache [ 1395.268362] env[65758]: DEBUG nova.network.neutron [req-126c3a35-66ee-4570-87ec-edc6389e5542 req-97288da5-6640-4317-a362-45831f9d6597 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1395.748600] env[65758]: DEBUG nova.network.neutron [-] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1395.771349] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d71e1b1c-76f3-432a-9412-424fab07a218 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.782367] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19b9202-5fed-4409-9f31-dfb070ce1f88 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.809085] env[65758]: DEBUG nova.compute.manager [req-126c3a35-66ee-4570-87ec-edc6389e5542 req-97288da5-6640-4317-a362-45831f9d6597 service nova] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Detach interface failed, port_id=123eca7f-0c97-4859-a1f3-d7d80d91b3cc, reason: Instance 9be7cd63-74b0-475c-9928-12330eb3c54c could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1396.251859] env[65758]: INFO nova.compute.manager [-] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Took 1.45 seconds to deallocate network for instance. 
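Aside: the recurring "Disabling service 'block-storage' ... no such option valid_interfaces in group [cinder]" warnings are openstacksdk hitting an option that was never registered in this process's oslo.config namespace. The error itself is easy to reproduce in isolation; the snippet below only illustrates that behaviour and is not Nova's configuration handling:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_group(cfg.OptGroup('cinder'))

try:
    conf.cinder.valid_interfaces   # option was never registered here
except cfg.NoSuchOptError as exc:
    print(exc)                     # no such option valid_interfaces in group [cinder]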
[ 1396.758635] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.759060] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.759158] env[65758]: DEBUG nova.objects.instance [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'resources' on Instance uuid 9be7cd63-74b0-475c-9928-12330eb3c54c {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1397.298018] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e009c020-5d5a-42bd-aabc-a9b2defe14b6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.305668] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eded0f3c-8d3a-47a9-a77b-bc01b628d68b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.336851] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e506560d-742b-4099-927d-878e42ffa681 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.344904] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83337c45-34cc-4d5f-a8ae-03fec9176eb7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.358933] env[65758]: DEBUG nova.compute.provider_tree [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.862959] env[65758]: DEBUG nova.scheduler.client.report [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1398.368535] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 
tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1398.390058] env[65758]: INFO nova.scheduler.client.report [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleted allocations for instance 9be7cd63-74b0-475c-9928-12330eb3c54c [ 1398.897219] env[65758]: DEBUG oslo_concurrency.lockutils [None req-c98ce269-ff59-4203-9ef8-7e8a30d94786 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "9be7cd63-74b0-475c-9928-12330eb3c54c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.722s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.227741] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1401.228044] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1401.730364] env[65758]: DEBUG nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Starting instance... 
{{(pid=65758) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1402.253956] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.254294] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1402.255829] env[65758]: INFO nova.compute.claims [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1403.293768] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b2c250-9dbd-452d-a9b2-18d44091c128 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.301920] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b88d75-ba79-4a58-b799-4140f26c5878 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.331503] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b4d0bc-7490-4150-99e1-ceb7c69b606b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.339266] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b6bd25-79bf-4ccd-9ced-a4e0a5163c5a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.354690] env[65758]: DEBUG nova.compute.provider_tree [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1403.857906] env[65758]: DEBUG nova.scheduler.client.report [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1404.363844] env[65758]: DEBUG oslo_concurrency.lockutils 
[None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.109s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.364408] env[65758]: DEBUG nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Start building networks asynchronously for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2874}} [ 1404.870758] env[65758]: DEBUG nova.compute.utils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1404.871854] env[65758]: DEBUG nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Allocating IP information in the background. {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2020}} [ 1404.872074] env[65758]: DEBUG nova.network.neutron [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] allocate_for_instance() {{(pid=65758) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1238}} [ 1404.872416] env[65758]: WARNING neutronclient.v2_0.client [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1404.872722] env[65758]: WARNING neutronclient.v2_0.client [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1404.873321] env[65758]: WARNING openstack [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1404.873662] env[65758]: WARNING openstack [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1404.881023] env[65758]: DEBUG nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Start building block device mappings for instance. {{(pid=65758) _build_resources /opt/stack/nova/nova/compute/manager.py:2909}} [ 1404.930813] env[65758]: DEBUG nova.policy [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcb6cf498b804adb971dd7e1722c277b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f32b2100e0824c56ab852e0d1bb37e87', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=65758) authorize /opt/stack/nova/nova/policy.py:192}} [ 1405.220781] env[65758]: DEBUG nova.network.neutron [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Successfully created port: def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:579}} [ 1405.890519] env[65758]: DEBUG nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Start spawning the instance on the hypervisor. 
{{(pid=65758) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2682}} [ 1405.918293] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-21T13:11:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-21T13:11:36Z,direct_url=,disk_format='vmdk',id=75a6399b-5100-4c51-b5cf-162bd505a28f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8a035a70e9c44ef7876c682f0ee3c231',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-21T13:11:36Z,virtual_size=,visibility=), allow threads: False {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:571}} [ 1405.918563] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Flavor limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1405.918746] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Image limits 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:360}} [ 1405.918952] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Flavor pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1405.919110] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Image pref 0:0:0 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:400}} [ 1405.919253] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=65758) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:438}} [ 1405.919455] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1405.919609] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:479}} [ 1405.919771] env[65758]: DEBUG 
nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Got 1 possible topologies {{(pid=65758) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:509}} [ 1405.919927] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1405.920112] env[65758]: DEBUG nova.virt.hardware [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=65758) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:585}} [ 1405.920998] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd18d56d-04ef-468c-8a38-f40e2857cd8d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.930809] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e79083a-be14-4ca8-a51c-9e554b149dd6 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.603055] env[65758]: DEBUG nova.compute.manager [req-5fed4f9b-5551-4698-8d6a-8f36d45e8003 req-920d8014-df99-40c8-91b5-1a8cc0d2df11 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Received event network-vif-plugged-def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1406.603055] env[65758]: DEBUG oslo_concurrency.lockutils [req-5fed4f9b-5551-4698-8d6a-8f36d45e8003 req-920d8014-df99-40c8-91b5-1a8cc0d2df11 service nova] Acquiring lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.603162] env[65758]: DEBUG oslo_concurrency.lockutils [req-5fed4f9b-5551-4698-8d6a-8f36d45e8003 req-920d8014-df99-40c8-91b5-1a8cc0d2df11 service nova] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.603256] env[65758]: DEBUG oslo_concurrency.lockutils [req-5fed4f9b-5551-4698-8d6a-8f36d45e8003 req-920d8014-df99-40c8-91b5-1a8cc0d2df11 service nova] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1406.603417] env[65758]: DEBUG nova.compute.manager [req-5fed4f9b-5551-4698-8d6a-8f36d45e8003 req-920d8014-df99-40c8-91b5-1a8cc0d2df11 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] No waiting events found dispatching network-vif-plugged-def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) pop_instance_event /opt/stack/nova/nova/compute/manager.py:345}} [ 1406.603576] env[65758]: WARNING nova.compute.manager 
[req-5fed4f9b-5551-4698-8d6a-8f36d45e8003 req-920d8014-df99-40c8-91b5-1a8cc0d2df11 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Received unexpected event network-vif-plugged-def5ecc0-da72-4351-8b10-eaeb7040f661 for instance with vm_state building and task_state spawning. [ 1406.673793] env[65758]: DEBUG nova.network.neutron [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Successfully updated port: def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) _update_port /opt/stack/nova/nova/network/neutron.py:617}} [ 1407.176661] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.177079] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1407.177079] env[65758]: DEBUG nova.network.neutron [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Building network info cache for instance {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2104}} [ 1407.680593] env[65758]: WARNING openstack [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1407.681034] env[65758]: WARNING openstack [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1407.717571] env[65758]: DEBUG nova.network.neutron [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Instance cache missing network info. {{(pid=65758) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3432}} [ 1407.790146] env[65758]: WARNING neutronclient.v2_0.client [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1407.790805] env[65758]: WARNING openstack [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1407.791170] env[65758]: WARNING openstack [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1407.869695] env[65758]: DEBUG nova.network.neutron [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Updating instance_info_cache with network_info: [{"id": "def5ecc0-da72-4351-8b10-eaeb7040f661", "address": "fa:16:3e:34:b3:20", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdef5ecc0-da", "ovs_interfaceid": "def5ecc0-da72-4351-8b10-eaeb7040f661", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1408.372373] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1408.372794] env[65758]: DEBUG nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Instance network_info: |[{"id": "def5ecc0-da72-4351-8b10-eaeb7040f661", "address": "fa:16:3e:34:b3:20", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdef5ecc0-da", "ovs_interfaceid": "def5ecc0-da72-4351-8b10-eaeb7040f661", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=65758) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2035}} [ 1408.373319] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:b3:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc16c915-cff1-4faa-a529-9773ee9bab7e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'def5ecc0-da72-4351-8b10-eaeb7040f661', 'vif_model': 'vmxnet3'}] {{(pid=65758) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1408.381824] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1408.382057] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Creating VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1408.382304] env[65758]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c2e95ef-0f3a-417a-965a-9188384a0513 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.405183] env[65758]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1408.405183] env[65758]: value = "task-4661627" [ 1408.405183] env[65758]: _type = "Task" [ 1408.405183] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.413645] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661627, 'name': CreateVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.632814] env[65758]: DEBUG nova.compute.manager [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Received event network-changed-def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1408.633034] env[65758]: DEBUG nova.compute.manager [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Refreshing instance network info cache due to event network-changed-def5ecc0-da72-4351-8b10-eaeb7040f661. {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1408.633247] env[65758]: DEBUG oslo_concurrency.lockutils [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] Acquiring lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.633385] env[65758]: DEBUG oslo_concurrency.lockutils [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] Acquired lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1408.633537] env[65758]: DEBUG nova.network.neutron [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Refreshing network info cache for port def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1408.915586] env[65758]: DEBUG oslo_vmware.api [-] Task: {'id': task-4661627, 'name': CreateVM_Task, 'duration_secs': 0.297866} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.915791] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Created VM on the ESX host {{(pid=65758) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1408.916275] env[65758]: WARNING neutronclient.v2_0.client [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1408.916657] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.916806] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1408.917135] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1408.917414] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7edb2534-232e-416e-b30c-f978d4c23838 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.922441] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1408.922441] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52e150a6-1cbb-532d-b335-6d3ba6e7b960" [ 1408.922441] env[65758]: _type = "Task" [ 1408.922441] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.930192] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e150a6-1cbb-532d-b335-6d3ba6e7b960, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.136408] env[65758]: WARNING neutronclient.v2_0.client [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1409.137142] env[65758]: WARNING openstack [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1409.137503] env[65758]: WARNING openstack [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1409.289432] env[65758]: WARNING neutronclient.v2_0.client [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1409.290105] env[65758]: WARNING openstack [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1409.290472] env[65758]: WARNING openstack [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1409.367855] env[65758]: DEBUG nova.network.neutron [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Updated VIF entry in instance network info cache for port def5ecc0-da72-4351-8b10-eaeb7040f661. 
{{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1409.368239] env[65758]: DEBUG nova.network.neutron [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Updating instance_info_cache with network_info: [{"id": "def5ecc0-da72-4351-8b10-eaeb7040f661", "address": "fa:16:3e:34:b3:20", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdef5ecc0-da", "ovs_interfaceid": "def5ecc0-da72-4351-8b10-eaeb7040f661", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1409.433610] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52e150a6-1cbb-532d-b335-6d3ba6e7b960, 'name': SearchDatastore_Task, 'duration_secs': 0.010713} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.434029] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1409.434163] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Processing image 75a6399b-5100-4c51-b5cf-162bd505a28f {{(pid=65758) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1409.434409] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.434558] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1409.434736] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1409.435014] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-892059a0-52d0-4f96-bc8f-acdb7c8197a3 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.443951] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=65758) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1409.444157] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=65758) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1409.444843] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64d321b0-281e-4dc5-9331-2b7463dcb830 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.450426] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1409.450426] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]52d7dcf7-2416-b1e0-f805-5d021f3c02ce" [ 1409.450426] env[65758]: _type = "Task" [ 1409.450426] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.458710] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d7dcf7-2416-b1e0-f805-5d021f3c02ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.871686] env[65758]: DEBUG oslo_concurrency.lockutils [req-d8fe9a1b-eb39-4c91-bb28-df6a9f9119cd req-5b036e82-542a-4c5a-aa0f-86a2465b91ab service nova] Releasing lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1409.961591] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]52d7dcf7-2416-b1e0-f805-5d021f3c02ce, 'name': SearchDatastore_Task, 'duration_secs': 0.00939} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.962396] env[65758]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5919cd19-186d-43b4-9517-1da1173d3b06 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.967815] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1409.967815] env[65758]: value = "session[52f282ba-8d16-d852-9890-43f0b19795c3]527944c3-a752-5950-e452-ddd57124f64d" [ 1409.967815] env[65758]: _type = "Task" [ 1409.967815] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.975577] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527944c3-a752-5950-e452-ddd57124f64d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.479964] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': session[52f282ba-8d16-d852-9890-43f0b19795c3]527944c3-a752-5950-e452-ddd57124f64d, 'name': SearchDatastore_Task, 'duration_secs': 0.009475} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.480329] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1410.480473] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5/7c4e29de-ddcd-4ac5-bd1c-73be4be639b5.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1410.480763] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6aacd3ec-a9a0-4bbc-b45c-da2881af188b {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.488325] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1410.488325] env[65758]: value = "task-4661628" [ 1410.488325] env[65758]: _type = "Task" [ 1410.488325] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.496356] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.999401] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43907} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.001797] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/75a6399b-5100-4c51-b5cf-162bd505a28f/75a6399b-5100-4c51-b5cf-162bd505a28f.vmdk to [datastore2] 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5/7c4e29de-ddcd-4ac5-bd1c-73be4be639b5.vmdk {{(pid=65758) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1411.001797] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Extending root virtual disk to 1048576 {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1411.001797] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b7d5d56-d4be-48ff-9aaa-3c78ae597964 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.008413] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1411.008413] env[65758]: value = "task-4661629" [ 1411.008413] env[65758]: _type = "Task" [ 1411.008413] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.018290] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.519751] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064724} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.519751] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Extended root virtual disk {{(pid=65758) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1411.520483] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f4aed7-e31b-46aa-a27f-8eb29be4aeb9 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.543143] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5/7c4e29de-ddcd-4ac5-bd1c-73be4be639b5.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1411.543449] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da97dee3-5c18-4cb0-80ee-775b9042fb07 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.563150] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1411.563150] env[65758]: value = "task-4661630" [ 1411.563150] env[65758]: _type = "Task" [ 1411.563150] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.571092] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661630, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.074249] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661630, 'name': ReconfigVM_Task, 'duration_secs': 0.264865} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.074558] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5/7c4e29de-ddcd-4ac5-bd1c-73be4be639b5.vmdk or device None with type sparse {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1412.075224] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f9194bb-2eda-43be-92cd-0c3a024c5431 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.082411] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1412.082411] env[65758]: value = "task-4661631" [ 1412.082411] env[65758]: _type = "Task" [ 1412.082411] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.090738] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661631, 'name': Rename_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.593204] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661631, 'name': Rename_Task, 'duration_secs': 0.137532} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.593650] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Powering on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1412.593957] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89247580-480f-44e8-96a4-fd2e1244a62d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.601120] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1412.601120] env[65758]: value = "task-4661632" [ 1412.601120] env[65758]: _type = "Task" [ 1412.601120] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.609727] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661632, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.113022] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661632, 'name': PowerOnVM_Task} progress is 100%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.612053] env[65758]: DEBUG oslo_vmware.api [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661632, 'name': PowerOnVM_Task, 'duration_secs': 0.543196} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.612472] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Powered on the VM {{(pid=65758) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1413.612562] env[65758]: INFO nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Took 7.72 seconds to spawn the instance on the hypervisor. [ 1413.612692] env[65758]: DEBUG nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Checking state {{(pid=65758) _get_power_state /opt/stack/nova/nova/compute/manager.py:1829}} [ 1413.613477] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c574f9-e9fa-4c2c-9e8d-b53adef90fd4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.134389] env[65758]: INFO nova.compute.manager [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Took 11.90 seconds to build instance. [ 1414.264482] env[65758]: DEBUG nova.compute.manager [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Received event network-changed-def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1414.264643] env[65758]: DEBUG nova.compute.manager [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Refreshing instance network info cache due to event network-changed-def5ecc0-da72-4351-8b10-eaeb7040f661. 
{{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11824}} [ 1414.264973] env[65758]: DEBUG oslo_concurrency.lockutils [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] Acquiring lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.265156] env[65758]: DEBUG oslo_concurrency.lockutils [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] Acquired lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1414.265322] env[65758]: DEBUG nova.network.neutron [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Refreshing network info cache for port def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2101}} [ 1414.635436] env[65758]: DEBUG oslo_concurrency.lockutils [None req-41691343-a11d-4725-8169-accbf5442721 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.407s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.769394] env[65758]: WARNING neutronclient.v2_0.client [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1414.770126] env[65758]: WARNING openstack [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1414.770533] env[65758]: WARNING openstack [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1414.937047] env[65758]: WARNING neutronclient.v2_0.client [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1414.937793] env[65758]: WARNING openstack [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1414.938149] env[65758]: WARNING openstack [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1415.016652] env[65758]: DEBUG nova.network.neutron [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Updated VIF entry in instance network info cache for port def5ecc0-da72-4351-8b10-eaeb7040f661. {{(pid=65758) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3592}} [ 1415.017032] env[65758]: DEBUG nova.network.neutron [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Updating instance_info_cache with network_info: [{"id": "def5ecc0-da72-4351-8b10-eaeb7040f661", "address": "fa:16:3e:34:b3:20", "network": {"id": "a0e768d8-322a-49b5-8d69-e3ed1c94203e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-819386003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f32b2100e0824c56ab852e0d1bb37e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc16c915-cff1-4faa-a529-9773ee9bab7e", "external-id": "nsx-vlan-transportzone-93", "segmentation_id": 93, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdef5ecc0-da", "ovs_interfaceid": "def5ecc0-da72-4351-8b10-eaeb7040f661", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1415.519661] env[65758]: DEBUG oslo_concurrency.lockutils [req-7f745e8c-26b8-44ca-b90e-f7c83309d729 req-191a7e20-6d49-4c5e-82f3-05905c287c43 service nova] Releasing lock "refresh_cache-7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" {{(pid=65758) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1433.478245] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.981843] env[65758]: DEBUG oslo_concurrency.lockutils [None 
req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1433.982083] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1433.982219] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.982365] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=65758) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1433.983294] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d8e86b-286f-4e49-96df-e2281bdcff9d {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.991857] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4afdcfe-10b3-4e6d-b846-0496ad3e2acd {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.006088] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3bc8dc-0f25-4b6e-b3e5-fdc0afaf407e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.012812] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a16cacb-85e8-4ee2-84dd-edba4bbf9072 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.043877] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180289MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=65758) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1434.044130] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.044290] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.171395] env[65758]: DEBUG nova.compute.resource_tracker [None 
req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Instance 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=65758) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1740}} [ 1435.171693] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1435.171756] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=100GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '1', 'num_vm_active': '1', 'num_task_None': '1', 'num_os_type_None': '1', 'num_proj_f32b2100e0824c56ab852e0d1bb37e87': '1', 'io_workload': '0'} {{(pid=65758) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1435.201078] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d358605b-0f7a-4b8e-8a0e-d01354aba231 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.209026] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e23438-8259-4318-bc06-cfeac0045b6a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.238468] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea16a2c-eccc-4949-8504-a97a108e9c0c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.246024] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec16c7b5-bc6f-4304-be24-e5876c343984 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.259522] env[65758]: DEBUG nova.compute.provider_tree [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.762975] env[65758]: DEBUG nova.scheduler.client.report [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1436.268621] env[65758]: DEBUG nova.compute.resource_tracker [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=65758) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 
1436.269057] env[65758]: DEBUG oslo_concurrency.lockutils [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.225s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1436.269110] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.269248] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Cleaning up deleted instances with incomplete migration {{(pid=65758) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11954}} [ 1436.771796] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.275248] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.275671] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.275671] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.275837] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.276109] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.276291] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.478308] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.478592] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=65758) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 1442.478480] env[65758]: DEBUG oslo_service.periodic_task [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=65758) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.478966] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] Cleaning up deleted instances {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11916}} [ 1442.988903] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] There are 29 instances to clean {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11925}} [ 1442.989202] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 6b11147a-3901-4314-8c9e-0868debce49a] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1443.492842] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 9be7cd63-74b0-475c-9928-12330eb3c54c] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1443.997894] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a125c33a-347c-4522-ac8e-e171fe92757a] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1444.501559] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 8b2cfaf8-dd34-4262-a867-613502a964ca] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1445.005550] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: e505f8e8-0612-4fe7-bcd2-73fdd39458fa] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1445.508971] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: c97f02fc-a244-40e9-97b3-8cbbf516607a] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1446.011908] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 45c9678b-0478-4192-8684-3b6fb0f4831e] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1446.515659] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 2bfca515-f4cb-4781-8423-aebf9477a69b] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1447.019530] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: e5b042e0-3dba-4bfe-9e4d-1d55bcb72742] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1447.523125] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 
cf64fbb5-4dfb-448d-be4f-7d4e4fecaf42] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1448.027250] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 0f3ae822-4c4c-4dff-94d4-3416187d6d25] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1448.531258] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: d0cadaac-07dd-4478-a83e-80ba46d103b2] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1449.034512] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: d1918f85-d122-4a84-88b3-f038e8c1149e] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1449.538307] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a9550f72-009c-4143-afe2-887727e5c071] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1450.041801] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 0c64f9ad-33e1-4792-9b44-b088d77c0383] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1450.545459] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 2014e795-5c62-47c2-9574-2f32ba29638d] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1451.049444] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 1f773924-74ee-4151-81ba-d105ce225289] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1451.554849] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: cdc1cfab-4f75-4caf-a4ee-8197af083353] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1452.058406] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: bc10286b-195f-48a2-b16c-f8f925ec7a2a] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1452.562203] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: b5bbff6b-42e9-4938-b4b3-05a9d5826d1c] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1453.065589] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: a81095fb-6fe8-4b24-b763-1da983978460] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1453.437457] env[65758]: DEBUG oslo_concurrency.lockutils [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock 
"7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1453.437765] env[65758]: DEBUG oslo_concurrency.lockutils [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1453.569373] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: ba16e0fe-6748-4d14-bb28-a65d63a2274d] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1453.941530] env[65758]: DEBUG nova.compute.utils [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Using /dev/sd instead of None {{(pid=65758) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1454.073035] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 8d0419d1-c301-4302-80c1-cd0fce7ccba4] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1454.444423] env[65758]: DEBUG oslo_concurrency.lockutils [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1454.576249] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 62ae50af-ff52-4084-8161-1a650eff5247] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1455.079535] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 149655f8-fcf5-4cfe-ab96-1171b9d3b550] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1455.501877] env[65758]: DEBUG oslo_concurrency.lockutils [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1455.502106] env[65758]: DEBUG oslo_concurrency.lockutils [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1455.502353] env[65758]: INFO nova.compute.manager 
[None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Attaching volume 9d5ada5e-1227-4d0b-be15-a71722a3b6ea to /dev/sdb [ 1455.532822] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59566261-6eee-4057-8a0e-9429da167894 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.540548] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e30a4fc-f21d-4d4a-8eb6-7c73389337be {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.554737] env[65758]: DEBUG nova.virt.block_device [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Updating existing volume attachment record: 6f430479-77d0-46ed-ac47-57c4d4154674 {{(pid=65758) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1455.583379] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 47cebd84-f9a1-4997-96aa-c76c5faa8c81] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1456.087328] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 31b7d1ee-58c1-47f3-a862-0bc5cb17addc] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1456.590965] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: afc1eb16-c275-4b3b-a7fe-9938d2241e24] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1457.095068] env[65758]: DEBUG nova.compute.manager [None req-cf1f4660-9add-41f4-abcf-10b4e056cbd4 None None] [instance: 37aadd44-79e8-4479-862f-265549c9d802] Instance has had 0 of 5 cleanup attempts {{(pid=65758) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11929}} [ 1460.100209] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Volume attach. 
Driver type: vmdk {{(pid=65758) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1460.100473] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910110', 'volume_id': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'name': 'volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7c4e29de-ddcd-4ac5-bd1c-73be4be639b5', 'attached_at': '', 'detached_at': '', 'volume_id': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'serial': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1460.101436] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa1cd3e-9cda-4a9d-a6db-1f446390ca9a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.118583] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6cabf1-a078-4c5a-a9fb-e8e70ae403b1 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.146296] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea/volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1460.146564] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae2a19fa-980c-4083-ace2-b8e707316e2c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.164927] env[65758]: DEBUG oslo_vmware.api [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1460.164927] env[65758]: value = "task-4661635" [ 1460.164927] env[65758]: _type = "Task" [ 1460.164927] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.174152] env[65758]: DEBUG oslo_vmware.api [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.675419] env[65758]: DEBUG oslo_vmware.api [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661635, 'name': ReconfigVM_Task, 'duration_secs': 0.324353} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.675721] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Reconfigured VM instance instance-0000007e to attach disk [datastore2] volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea/volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea.vmdk or device None with type thin {{(pid=65758) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1460.680651] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d93c3ab-e4df-4658-9306-530eac366dd8 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.696675] env[65758]: DEBUG oslo_vmware.api [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1460.696675] env[65758]: value = "task-4661636" [ 1460.696675] env[65758]: _type = "Task" [ 1460.696675] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.705183] env[65758]: DEBUG oslo_vmware.api [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661636, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.207027] env[65758]: DEBUG oslo_vmware.api [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661636, 'name': ReconfigVM_Task, 'duration_secs': 0.151879} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.207375] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910110', 'volume_id': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'name': 'volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7c4e29de-ddcd-4ac5-bd1c-73be4be639b5', 'attached_at': '', 'detached_at': '', 'volume_id': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'serial': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea'} {{(pid=65758) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1462.243694] env[65758]: DEBUG nova.objects.instance [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'flavor' on Instance uuid 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1462.749253] env[65758]: DEBUG oslo_concurrency.lockutils [None req-94cf40ba-ac6d-44ba-9f5f-1655cc11a2ee tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.247s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1462.932629] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1462.932901] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1463.436695] env[65758]: INFO nova.compute.manager [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Detaching volume 9d5ada5e-1227-4d0b-be15-a71722a3b6ea [ 1463.466767] env[65758]: INFO nova.virt.block_device [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Attempting to driver detach volume 9d5ada5e-1227-4d0b-be15-a71722a3b6ea from mountpoint /dev/sdb [ 1463.467027] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 
tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Volume detach. Driver type: vmdk {{(pid=65758) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1463.467271] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910110', 'volume_id': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'name': 'volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7c4e29de-ddcd-4ac5-bd1c-73be4be639b5', 'attached_at': '', 'detached_at': '', 'volume_id': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'serial': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1463.468205] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9eb23a-703a-44e4-9ff2-328168985239 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.492226] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db52391-172e-416f-9314-2d8471cfc1ab {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.500324] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2416a1c1-3e72-4b70-8827-4b386f5f27a5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.521955] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338f454f-87e3-4e92-b277-1bbb33388cc5 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.537978] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] The volume has not been displaced from its original location: [datastore2] volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea/volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea.vmdk. No consolidation needed. 
{{(pid=65758) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1463.543150] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Reconfiguring VM instance instance-0000007e to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1463.543455] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7354901-27bd-439a-8700-61d700331bd7 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.562424] env[65758]: DEBUG oslo_vmware.api [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1463.562424] env[65758]: value = "task-4661637" [ 1463.562424] env[65758]: _type = "Task" [ 1463.562424] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.571316] env[65758]: DEBUG oslo_vmware.api [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661637, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.072986] env[65758]: DEBUG oslo_vmware.api [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661637, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.574582] env[65758]: DEBUG oslo_vmware.api [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661637, 'name': ReconfigVM_Task} progress is 99%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.074684] env[65758]: DEBUG oslo_vmware.api [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661637, 'name': ReconfigVM_Task, 'duration_secs': 1.216583} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.074978] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Reconfigured VM instance instance-0000007e to detach disk 2001 {{(pid=65758) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1465.079589] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bf41ab9-dfc9-4fed-b2a8-0f56b8d2a268 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.095279] env[65758]: DEBUG oslo_vmware.api [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1465.095279] env[65758]: value = "task-4661638" [ 1465.095279] env[65758]: _type = "Task" [ 1465.095279] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.103691] env[65758]: DEBUG oslo_vmware.api [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.607153] env[65758]: DEBUG oslo_vmware.api [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661638, 'name': ReconfigVM_Task, 'duration_secs': 0.13565} completed successfully. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.607535] env[65758]: DEBUG nova.virt.vmwareapi.volumeops [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-910110', 'volume_id': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'name': 'volume-9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7c4e29de-ddcd-4ac5-bd1c-73be4be639b5', 'attached_at': '', 'detached_at': '', 'volume_id': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea', 'serial': '9d5ada5e-1227-4d0b-be15-a71722a3b6ea'} {{(pid=65758) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1466.148135] env[65758]: DEBUG nova.objects.instance [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'flavor' on Instance uuid 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1467.155457] env[65758]: DEBUG oslo_concurrency.lockutils [None req-47b1bfa9-6c3c-4521-8211-f8eb4bca6aea tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.222s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1468.188997] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.189448] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1468.189505] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.189675] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=65758) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1468.189844] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1468.191997] env[65758]: INFO nova.compute.manager [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Terminating instance [ 1468.695515] env[65758]: DEBUG nova.compute.manager [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Start destroying the instance on the hypervisor. {{(pid=65758) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3202}} [ 1468.695740] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Destroying instance {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1468.696703] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f082db-b5ba-432e-9fbc-de17fc53f13c {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.705155] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Powering off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1468.705426] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cecee77e-742d-4e2a-8d5f-5d7e6cb8eb5f {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.712891] env[65758]: DEBUG oslo_vmware.api [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1468.712891] env[65758]: value = "task-4661639" [ 1468.712891] env[65758]: _type = "Task" [ 1468.712891] env[65758]: } to complete. {{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.722208] env[65758]: DEBUG oslo_vmware.api [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661639, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.224553] env[65758]: DEBUG oslo_vmware.api [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661639, 'name': PowerOffVM_Task, 'duration_secs': 0.212193} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.224952] env[65758]: DEBUG nova.virt.vmwareapi.vm_util [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Powered off the VM {{(pid=65758) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1469.225020] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Unregistering the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1469.225268] env[65758]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-499ebe77-e847-465a-a299-3f44ece87c91 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.294489] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Unregistered the VM {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1469.294769] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Deleting contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1469.294985] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleting the datastore file [datastore2] 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5 {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1469.295287] env[65758]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7506ab38-159b-47d3-8871-b11ead71914e {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.302930] env[65758]: DEBUG oslo_vmware.api [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for the task: (returnval){ [ 1469.302930] env[65758]: value = "task-4661641" [ 1469.302930] env[65758]: _type = "Task" [ 1469.302930] env[65758]: } to complete. 
{{(pid=65758) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.311608] env[65758]: DEBUG oslo_vmware.api [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661641, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.813970] env[65758]: DEBUG oslo_vmware.api [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Task: {'id': task-4661641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134927} completed successfully. {{(pid=65758) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.814182] env[65758]: DEBUG nova.virt.vmwareapi.ds_util [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleted the datastore file {{(pid=65758) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1469.814364] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Deleted contents of the VM from datastore datastore2 {{(pid=65758) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1469.814539] env[65758]: DEBUG nova.virt.vmwareapi.vmops [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Instance destroyed {{(pid=65758) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1469.814744] env[65758]: INFO nova.compute.manager [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1469.815018] env[65758]: DEBUG oslo.service.backend._common.loopingcall [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=65758) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/_common/loopingcall.py:419}} [ 1469.815260] env[65758]: DEBUG nova.compute.manager [-] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Deallocating network for instance {{(pid=65758) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2328}} [ 1469.815391] env[65758]: DEBUG nova.network.neutron [-] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] deallocate_for_instance() {{(pid=65758) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1894}} [ 1469.815670] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. 
[ 1469.816290] env[65758]: WARNING openstack [-] Disabling service 'block-storage': Encountered an exception attempting to process config for project 'cinder' (service type 'block-storage'): no such option valid_interfaces in group [cinder]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [cinder] [ 1469.816549] env[65758]: WARNING openstack [-] Disabling service 'key-manager': Encountered an exception attempting to process config for project 'barbican' (service type 'key-manager'): no such option valid_interfaces in group [barbican]: oslo_config.cfg.NoSuchOptError: no such option valid_interfaces in group [barbican] [ 1469.852365] env[65758]: WARNING neutronclient.v2_0.client [-] The python binding code in neutronclient is deprecated in favor of OpenstackSDK, please use that as this will be removed in a future release. [ 1470.695313] env[65758]: DEBUG nova.compute.manager [req-10edbcdb-9018-4a6e-94b8-bd662fa31045 req-f383660a-965f-48e2-afe3-30f1942e0d77 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Received event network-vif-deleted-def5ecc0-da72-4351-8b10-eaeb7040f661 {{(pid=65758) external_instance_event /opt/stack/nova/nova/compute/manager.py:11819}} [ 1470.695584] env[65758]: INFO nova.compute.manager [req-10edbcdb-9018-4a6e-94b8-bd662fa31045 req-f383660a-965f-48e2-afe3-30f1942e0d77 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Neutron deleted interface def5ecc0-da72-4351-8b10-eaeb7040f661; detaching it from the instance and deleting it from the info cache [ 1470.695688] env[65758]: DEBUG nova.network.neutron [req-10edbcdb-9018-4a6e-94b8-bd662fa31045 req-f383660a-965f-48e2-afe3-30f1942e0d77 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1471.172765] env[65758]: DEBUG nova.network.neutron [-] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Updating instance_info_cache with network_info: [] {{(pid=65758) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:121}} [ 1471.198062] env[65758]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6485c0aa-7707-4607-8a75-9a1ba78a0887 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.208863] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c56c5e-d3ea-4f06-9382-455bef686a5a {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.235410] env[65758]: DEBUG nova.compute.manager [req-10edbcdb-9018-4a6e-94b8-bd662fa31045 req-f383660a-965f-48e2-afe3-30f1942e0d77 service nova] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Detach interface failed, port_id=def5ecc0-da72-4351-8b10-eaeb7040f661, reason: Instance 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5 could not be found. {{(pid=65758) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1471.677444] env[65758]: INFO nova.compute.manager [-] [instance: 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5] Took 1.86 seconds to deallocate network for instance. 
[ 1472.184191] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1472.184511] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1472.184758] env[65758]: DEBUG nova.objects.instance [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lazy-loading 'resources' on Instance uuid 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5 {{(pid=65758) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1472.721380] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0cec1e-3cbb-4a4f-a376-bbe0e2c13809 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.728559] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e0abe1-c089-4013-877e-245d7970d536 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.758540] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7503dccf-a204-4c3b-92dc-9fa73cfee967 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.766239] env[65758]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9ae145-4cd6-46ec-abb1-cd059b2e60e4 {{(pid=65758) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.780060] env[65758]: DEBUG nova.compute.provider_tree [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed in ProviderTree for provider: 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 {{(pid=65758) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.283898] env[65758]: DEBUG nova.scheduler.client.report [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Inventory has not changed for provider 0dbdaf1f-971c-4c55-8486-ee9b6f6f7d51 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=65758) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1473.789859] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 
tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.605s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1473.810041] env[65758]: INFO nova.scheduler.client.report [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Deleted allocations for instance 7c4e29de-ddcd-4ac5-bd1c-73be4be639b5 [ 1474.318594] env[65758]: DEBUG oslo_concurrency.lockutils [None req-54f8014c-0337-439a-a618-c5fa289d0199 tempest-AttachVolumeNegativeTest-660280753 tempest-AttachVolumeNegativeTest-660280753-project-member] Lock "7c4e29de-ddcd-4ac5-bd1c-73be4be639b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.129s {{(pid=65758) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}